From 924ecef66386b61e5bef1c4a712aa16501d62d38 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Tue, 26 Mar 2024 17:15:42 -0400 Subject: [PATCH 001/507] Make command line option for fs ingress --- CPAC/pipeline/engine.py | 113 ++++++++++++++++++++++------------------ CPAC/pipeline/schema.py | 1 + dev/docker_data/run.py | 8 +++ 3 files changed, 70 insertions(+), 52 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 3675c4035d..c5aee5834d 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1753,61 +1753,70 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - if 'anat' not in data_paths: + if not cfg.pipeline_setup['freesurfer_dir']: print('No FreeSurfer data present.') return rpool + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + if os.path.exists(os.path.join(fs_path, part_id)): + fs_path = os.path.join(fs_path, part_id) + print(fs_path) + if not os.path.exists(fs_path): + if 'sub' in part_id: + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) + else: + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], 'sub-', part_id) + if not os.path.exists(fs_path): + print(f'No FreeSurfer data found for subject {part_id}') + return rpool + fs_ingress = create_general_datasource('gather_freesurfer_dir') + fs_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=fs_path, + creds_path=data_paths['creds_path'], + dl_dir=cfg.pipeline_setup['working_directory']['path']) + rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', + {}, "", "freesurfer_config_ingress") + + recon_outs = { + 'pipeline-fs_raw-average': 'mri/rawavg.mgz', + 'pipeline-fs_subcortical-seg': 'mri/aseg.mgz', + 'pipeline-fs_brainmask': 'mri/brainmask.mgz', + 'pipeline-fs_wmparc': 'mri/wmparc.mgz', + 'pipeline-fs_T1': 'mri/T1.mgz', + 'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv', + 'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv', + 'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial', + 'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial', + 'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm', + 'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm', + 'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere', + 'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere', + 'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc', + 'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc', + 'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness', + 'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness', + 'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume', + 'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume', + 'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white', + 'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white', + 'pipeline-fs_xfm': 'mri/transforms/talairach.lta' + } - if 'freesurfer_dir' in data_paths['anat']: - fs_ingress = create_general_datasource('gather_freesurfer_dir') - fs_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=data_paths['anat']['freesurfer_dir'], - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', - {}, "", "freesurfer_config_ingress") - - recon_outs = { - 'pipeline-fs_raw-average': 'mri/rawavg.mgz', - 
'pipeline-fs_subcortical-seg': 'mri/aseg.mgz', - 'pipeline-fs_brainmask': 'mri/brainmask.mgz', - 'pipeline-fs_wmparc': 'mri/wmparc.mgz', - 'pipeline-fs_T1': 'mri/T1.mgz', - 'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv', - 'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv', - 'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial', - 'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial', - 'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm', - 'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm', - 'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere', - 'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere', - 'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc', - 'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc', - 'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness', - 'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness', - 'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume', - 'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume', - 'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white', - 'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white', - 'pipeline-fs_xfm': 'mri/transforms/talairach.lta' - } - - for key, outfile in recon_outs.items(): - fullpath = os.path.join(data_paths['anat']['freesurfer_dir'], - outfile) - if os.path.exists(fullpath): - fs_ingress = create_general_datasource(f'gather_fs_{key}_dir') - fs_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=fullpath, - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data(key, fs_ingress, 'outputspec.data', - {}, "", f"fs_{key}_ingress") - else: - warnings.warn(str( - LookupError("\n[!] Path does not exist for " - f"{fullpath}.\n"))) + for key, outfile in recon_outs.items(): + fullpath = os.path.join(fs_path, outfile) + if os.path.exists(fullpath): + fs_ingress = create_general_datasource(f'gather_fs_{key}_dir') + fs_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=fullpath, + creds_path=data_paths['creds_path'], + dl_dir=cfg.pipeline_setup['working_directory']['path']) + rpool.set_data(key, fs_ingress, 'outputspec.data', + {}, "", f"fs_{key}_ingress") + else: + warnings.warn(str( + LookupError("\n[!] 
Path does not exist for " + f"{fullpath}.\n"))) return rpool diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index b1ebc7d947..60fd1d9d77 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -425,6 +425,7 @@ def sanitize(filename): 'Debugging': { 'verbose': bool1_1, }, + 'freesurfer_dir': str, 'outdir_ingress': { 'run': bool1_1, 'Template': Maybe(str), diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index 344f7206c3..acdbbfbb02 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -343,6 +343,11 @@ def run_main(): 'need to bind the port using the Docker ' 'flag "-p".', action='store_true') + + parser.add_argument('--freesurfer_dir', '--freesurfer-dir', + help='Specify path to pre-computed FreeSurfer outputs ' + 'to pull into C-PAC run', + default=False) # get the command line arguments args = parser.parse_args( @@ -641,6 +646,9 @@ def run_main(): .format(c['pipeline_setup']['system_config'][ 'num_participants_at_once'])) + if args.freesurfer_dir: + c['pipeline_setup']['freesurfer_dir'] = args.freesurfer_dir + if not args.data_config_file: print("Input directory: {0}".format(bids_dir)) From d4335139800b282f0e50df0d33511f00a6a10f38 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Mon, 1 Apr 2024 11:18:53 -0400 Subject: [PATCH 002/507] Make filename parsing more generalizable --- CPAC/pipeline/engine.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index c5aee5834d..3c671c56f4 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1756,18 +1756,26 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, if not cfg.pipeline_setup['freesurfer_dir']: print('No FreeSurfer data present.') return rpool + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) - if os.path.exists(os.path.join(fs_path, part_id)): - fs_path = os.path.join(fs_path, part_id) - print(fs_path) if not os.path.exists(fs_path): if 'sub' in part_id: fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) else: - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], 'sub-', part_id) + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], ('sub-' + part_id)) + + # patch for flo-specific data if not os.path.exists(fs_path): - print(f'No FreeSurfer data found for subject {part_id}') - return rpool + subj_ses = part_id + '-' + ses_id + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], subj_ses) + if not os.path.exists(fs_path): + print(f'No FreeSurfer data found for subject {part_id}') + return rpool + + # Check for double nested subj names + if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))): + fs_path = os.path.join(fs_path, part_id) + fs_ingress = create_general_datasource('gather_freesurfer_dir') fs_ingress.inputs.inputnode.set( unique_id=unique_id, From 6f289a42f25c22aa1edb3e2520433f6ee6874877 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 6 Dec 2023 11:52:28 -0500 Subject: [PATCH 003/507] :construction_worker: Make slurm_testing owner configurable via environment [run reg-suite] --- .github/workflows/regression_test_lite.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 380a3cc6b4..a4cd2c4154 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -28,6 +28,7 @@ jobs: 
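Patches 001–002 above wire a new `--freesurfer_dir` flag through `run.py` and the pipeline schema, then have `ingress_freesurfer` resolve the subject directory by trying several naming conventions in turn. A minimal standalone sketch of that lookup order follows; `find_fs_dir` is a hypothetical helper name, not part of C-PAC, and the exact prefix test differs slightly from the diff:

import os

def find_fs_dir(freesurfer_dir, part_id, ses_id):
    """Resolve a FreeSurfer subject directory across naming conventions.

    Hypothetical helper mirroring the lookup order in patch 002.
    """
    candidates = [
        os.path.join(freesurfer_dir, part_id),
        # 'sub-' prefix stripped or added
        os.path.join(freesurfer_dir,
                     part_id.replace('sub-', '')
                     if part_id.startswith('sub-') else 'sub-' + part_id),
        # some datasets name the directory '<participant>-<session>'
        os.path.join(freesurfer_dir, f'{part_id}-{ses_id}'),
    ]
    for fs_path in candidates:
        if os.path.exists(fs_path):
            # unwrap a double-nested subject directory if present
            nested = os.path.join(fs_path, os.path.basename(fs_path))
            return nested if os.path.exists(nested) else fs_path
    return None

Once resolved, the directory is ingressed whole, and each expected recon-all output (the `recon_outs` mapping above) is ingressed individually only if its file exists on disk.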
name: Regression Test - Lite environment: ACCESS env: + SLURM_TESTING_OWNER: ${{ vars.SLURM_TESTING_OWNER }} SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} SSH_HOST: ${{ secrets.SSH_HOST }} if: "${{ github.env.SSH_PRIVATE_KEY }} != ''" @@ -74,14 +75,14 @@ jobs: script: | cd ${{ secrets.SSH_WORK_DIR }} if [ ! -d slurm_testing ] ; then - git clone https://github.com/${{ github.repository_owner }}/slurm_testing slurm_testing + git clone https://github.com/${{ env.SLURM_TESTING_OWNER }}/slurm_testing slurm_testing else cd slurm_testing - git pull origin regression/after_runs + git pull origin main cd .. fi mkdir -p ./logs/${{ github.sha }} - sbatch --export="HOME_DIR=${{ secrets.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ secrets.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM + sbatch --export="HOME_DIR=${{ secrets.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ secrets.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }},TESTING_OWNER=$(echo ${{ env.SLURM_TESTING_OWNER }}) | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM - name: Cleanup SSH run: | From 823969a2999e88cad1a04300b6f18aafd43a37a9 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Dec 2023 21:50:18 -0500 Subject: [PATCH 004/507] :construction_worker: Don't try to run when sources are forks [run reg-suite] --- .github/workflows/regression_test_lite.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index a4cd2c4154..dd5b0fabf6 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -31,7 +31,11 @@ jobs: SLURM_TESTING_OWNER: ${{ vars.SLURM_TESTING_OWNER }} SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} SSH_HOST: ${{ secrets.SSH_HOST }} - if: "${{ github.env.SSH_PRIVATE_KEY }} != ''" + if: | + ${{ github.env.SSH_PRIVATE_KEY != '' && + (github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && + github.event.repository.fork == 'false')) }} runs-on: ubuntu-latest steps: - name: Get C-PAC branch From 8f7b640b718dd5c0f85a06fb05487d88a38cba99 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 2 Jan 2024 22:38:33 -0500 Subject: [PATCH 005/507] :construction_worker: Set `$COMPARISON_PATH` from GH environment --- .github/workflows/regression_test_lite.yml | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index dd5b0fabf6..ce593c0a7b 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -28,9 +28,16 @@ jobs: name: Regression Test - Lite environment: ACCESS env: + COMPARISON_PATH: ${{ secrets.COMPARISON_PATH }} + DASHBOARD_REPO: ${{ vars.DASHBOARD_REPO}} + DOCKER_TAG: + GH_CLI_BIN_PATH: ${{ secrets.GH_CLI_BIN_PATH }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SLURM_TESTING_OWNER: ${{ vars.SLURM_TESTING_OWNER }} SSH_PRIVATE_KEY: ${{ 
secrets.SSH_PRIVATE_KEY }} SSH_HOST: ${{ secrets.SSH_HOST }} + SSH_USER: ${{ secrets.SSH_USER }} + SSH_WORK_DIR: ${{ secrets.SSH_WORK_DIR }} if: | ${{ github.env.SSH_PRIVATE_KEY != '' && (github.event_name == 'workflow_dispatch' || @@ -64,7 +71,7 @@ jobs: - name: Initiate check uses: guibranco/github-status-action-v2@v1.1.7 with: - authToken: ${{ secrets.GITHUB_TOKEN }} + authToken: ${{ env.GITHUB_TOKEN }} context: Launch lite regression test description: launching state: pending @@ -72,12 +79,12 @@ jobs: - name: Connect and Run Regression Test Lite uses: appleboy/ssh-action@v1.0.0 with: - host: ${{ secrets.SSH_HOST }} - username: ${{ secrets.SSH_USER }} - key: ${{ secrets.SSH_PRIVATE_KEY }} + host: ${{ env.SSH_HOST }} + username: ${{ env.SSH_USER }} + key: ${{ env.SSH_PRIVATE_KEY }} command_timeout: 200m script: | - cd ${{ secrets.SSH_WORK_DIR }} + cd ${{ env.SSH_WORK_DIR }} if [ ! -d slurm_testing ] ; then git clone https://github.com/${{ env.SLURM_TESTING_OWNER }}/slurm_testing slurm_testing else @@ -86,7 +93,7 @@ jobs: cd .. fi mkdir -p ./logs/${{ github.sha }} - sbatch --export="HOME_DIR=${{ secrets.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ secrets.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }},TESTING_OWNER=$(echo ${{ env.SLURM_TESTING_OWNER }}) | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM + sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }},TESTING_OWNER=$(echo ${{ env.SLURM_TESTING_OWNER }}) | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM - name: Cleanup SSH run: | From 163cd4add4f3b9751241e12a3dbc29fefb3bacc6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jan 2024 15:34:03 -0500 Subject: [PATCH 006/507] :construction_worker: Refactor slurm testing repo config --- .github/workflows/regression_test_lite.yml | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index ce593c0a7b..b63e575e74 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -33,7 +33,8 @@ jobs: DOCKER_TAG: GH_CLI_BIN_PATH: ${{ secrets.GH_CLI_BIN_PATH }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLURM_TESTING_OWNER: ${{ vars.SLURM_TESTING_OWNER }} + SLURM_TESTING_BRANCH: ${{ vars.SLURM_TESTING_BRANCH }} + SLURM_TESTING_REPO: ${{ vars.SLURM_TESTING_REPO }} SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} SSH_HOST: ${{ secrets.SSH_HOST }} SSH_USER: ${{ secrets.SSH_USER }} @@ -85,15 +86,15 @@ jobs: command_timeout: 200m script: | cd ${{ env.SSH_WORK_DIR }} - if [ ! -d slurm_testing ] ; then - git clone https://github.com/${{ env.SLURM_TESTING_OWNER }}/slurm_testing slurm_testing - else - cd slurm_testing - git pull origin main - cd .. 
+ if [ -d C-PAC_slurm_testing ] ; then + rm -rf C-PAC_slurm_testing fi + git clone https://github.com/${{ env.SLURM_TESTING_REPO }} C-PAC_slurm_testing + cd C-PAC_slurm_testing + git checkout ${{ env.SLURM_TESTING_BRANCH }} + cd .. mkdir -p ./logs/${{ github.sha }} - sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }},TESTING_OWNER=$(echo ${{ env.SLURM_TESTING_OWNER }}) | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM + sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=${{ github.repository }},TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM - name: Cleanup SSH run: | From 16c47a5e3f1ee3c712c87bedc0b1d3795223dfff Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 9 Jan 2024 20:47:09 -0500 Subject: [PATCH 007/507] :construction_worker: Pass just repo as `$REPO` to regtest [run reg-suite] --- .github/workflows/regression_test_lite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index b63e575e74..f5074942f5 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -94,7 +94,7 @@ jobs: git checkout ${{ env.SLURM_TESTING_BRANCH }} cd .. 
mkdir -p ./logs/${{ github.sha }} - sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=${{ github.repository }},TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM + sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM - name: Cleanup SSH run: | From 9eb125af58a1f07725cc4129264a79d104494bca Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 11 Jan 2024 23:24:53 -0500 Subject: [PATCH 008/507] :necktie: Consolidate GitHub Check [run reg-suite] --- .github/workflows/regression_test_lite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index f5074942f5..3f0719bf91 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -73,7 +73,7 @@ jobs: uses: guibranco/github-status-action-v2@v1.1.7 with: authToken: ${{ env.GITHUB_TOKEN }} - context: Launch lite regression test + context: lite regression test description: launching state: pending From 964eab2ac4b876033e53df53d0e5413381381cba Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 12 Jan 2024 13:41:40 -0500 Subject: [PATCH 009/507] :closed_lock_with_key: Pass `$TOKEN_FILE` as a secret [run reg-suite] --- .github/workflows/regression_test_lite.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 3f0719bf91..97b284a844 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -39,6 +39,7 @@ jobs: SSH_HOST: ${{ secrets.SSH_HOST }} SSH_USER: ${{ secrets.SSH_USER }} SSH_WORK_DIR: ${{ secrets.SSH_WORK_DIR }} + TOKEN_FILE: ${{ secrets.TOKEN_FILE }} if: | ${{ github.env.SSH_PRIVATE_KEY != '' && (github.event_name == 'workflow_dispatch' || @@ -94,7 +95,7 @@ jobs: git checkout ${{ env.SLURM_TESTING_BRANCH }} cd .. 
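Most of the churn in patches 003–009 lives in one long `sbatch --export=...` line: `--export` takes a single flat comma-separated NAME=value list, so embedded commas and shell substitutions like the `$(echo ... | cut ...)` in patches 005–006 are easy to mis-nest, which is what patch 007 untangles. A hedged sketch of assembling such a command line programmatically; the helper is hypothetical, not part of this repository:

import shlex

def sbatch_command(env_vars, script, log_dir):
    """Build an `sbatch --export=...` invocation from a dict.

    Every value must itself be free of commas and unquoted shell
    syntax, since --export has no escaping mechanism of its own.
    """
    exports = ','.join(f'{name}={value}' for name, value in env_vars.items())
    return ' '.join([
        'sbatch',
        f'--export="{exports}"',
        f'--output={log_dir}/out.log',
        f'--error={log_dir}/error.log',
        shlex.quote(script),
    ])

print(sbatch_command({'SHA': 'abc123', 'REPO': 'C-PAC'},
                     'launch_regtest_lite.SLURM', './logs/abc123'))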
mkdir -p ./logs/${{ github.sha }} - sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM + sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }},TOKEN_FILE=${{ env.TOKEN_FILE }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM - name: Cleanup SSH run: | From 21f4b9572c5b326fc2a7686550726f34a474c0ae Mon Sep 17 00:00:00 2001 From: "Theodore (Machine User)" Date: Thu, 18 Jan 2024 15:58:42 +0000 Subject: [PATCH 010/507] :bookmark: Update version to 1.8.7dev1 (:twisted_rightwards_arrows: Merge 'develop' into 'slurm_testing_owner' [run reg-suite]) --- CPAC/resources/configs/pipeline_config_regtest-1.yml | 4 ++++ CPAC/resources/configs/pipeline_config_regtest-2.yml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index fccbb36fc1..867851efde 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -75,6 +75,10 @@ anatomical_preproc: brain_extraction: run: On + FSL-BET: + + # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. + mask_boolean: Off segmentation: diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index 08d546bdc2..c00009687c 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -84,6 +84,10 @@ anatomical_preproc: # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] # this is a fork option using: [BET] + FSL-BET: + + # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. 
+ mask_boolean: Off segmentation: From ac35888d33ba01c110caf69b4903e827671c7981 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 18 Jan 2024 22:58:48 -0500 Subject: [PATCH 011/507] :construction_worker: Shallow clone --- .github/workflows/regression_test_lite.yml | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 97b284a844..3cc63595ea 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -32,7 +32,6 @@ jobs: DASHBOARD_REPO: ${{ vars.DASHBOARD_REPO}} DOCKER_TAG: GH_CLI_BIN_PATH: ${{ secrets.GH_CLI_BIN_PATH }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SLURM_TESTING_BRANCH: ${{ vars.SLURM_TESTING_BRANCH }} SLURM_TESTING_REPO: ${{ vars.SLURM_TESTING_REPO }} SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} @@ -69,14 +68,6 @@ jobs: echo "${{ env.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa chmod 600 ~/.ssh/id_rsa ssh-keyscan -H -t rsa "${{ env.SSH_HOST }}" > ~/.ssh/known_hosts - - - name: Initiate check - uses: guibranco/github-status-action-v2@v1.1.7 - with: - authToken: ${{ env.GITHUB_TOKEN }} - context: lite regression test - description: launching - state: pending - name: Connect and Run Regression Test Lite uses: appleboy/ssh-action@v1.0.0 @@ -86,14 +77,12 @@ jobs: key: ${{ env.SSH_PRIVATE_KEY }} command_timeout: 200m script: | + set -x cd ${{ env.SSH_WORK_DIR }} if [ -d C-PAC_slurm_testing ] ; then rm -rf C-PAC_slurm_testing fi - git clone https://github.com/${{ env.SLURM_TESTING_REPO }} C-PAC_slurm_testing - cd C-PAC_slurm_testing - git checkout ${{ env.SLURM_TESTING_BRANCH }} - cd .. + git clone --branch ${{ env.SLURM_TESTING_BRANCH }} --depth 1 https://github.com/${{ env.SLURM_TESTING_REPO }} C-PAC_slurm_testing mkdir -p ./logs/${{ github.sha }} sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }},TOKEN_FILE=${{ env.TOKEN_FILE }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM From e0676c7e3866d84b11bb0149946ac3c942800b09 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 22 Jan 2024 17:11:52 -0500 Subject: [PATCH 012/507] :alembic: Rerun regtest lite [run reg-suite] From 24a70b1e3edadf0a980516e177f500ab3eab3732 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 2 Apr 2024 11:22:52 -0400 Subject: [PATCH 013/507] fixup! 
:bookmark: Update version to 1.8.7dev1 (:twisted_rightwards_arrows: Merge 'develop' into 'slurm_testing_owner' --- CPAC/resources/configs/1.7-1.8-nesting-mappings.yml | 5 ----- CPAC/resources/configs/pipeline_config_blank.yml | 3 ++- CPAC/resources/configs/pipeline_config_regtest-1.yml | 4 ---- CPAC/resources/configs/pipeline_config_regtest-2.yml | 4 ---- .../configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml | 9 --------- .../test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 9 --------- .../test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml | 9 --------- .../configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml | 5 ----- .../test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml | 9 --------- .../test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml | 5 ----- .../pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 5 ----- .../test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml | 5 ----- .../test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 5 ----- .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml | 5 ----- CPAC/resources/configs/test_configs/pipe-test_all.yml | 9 --------- 15 files changed, 2 insertions(+), 89 deletions(-) diff --git a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml index dd83685bc1..65bcb95b9c 100644 --- a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml +++ b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml @@ -251,11 +251,6 @@ bet_frac: - brain_extraction - FSL-BET - frac -bet_mask_boolean: - - anatomical_preproc - - brain_extraction - - FSL-BET - - mask_boolean bet_mesh_boolean: - anatomical_preproc - brain_extraction diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index f4c049b1b5..d73a17f9cb 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -352,7 +352,8 @@ anatomical_preproc: monkey: Off FSL-BET: - # Swich "On" to crop out neck regions before generating the mask (default: Off). + + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov: Off # Set the threshold value controling the brain vs non-brain voxels, default is 0.5 diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index 867851efde..fccbb36fc1 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -75,10 +75,6 @@ anatomical_preproc: brain_extraction: run: On - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off segmentation: diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index c00009687c..08d546bdc2 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -84,10 +84,6 @@ anatomical_preproc: # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. 
- mask_boolean: Off segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index e68f31d827..605f62f427 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -68,15 +68,6 @@ pipeline_setup: # If you have specified an FSL path in your .bashrc file, this path will be set automatically. FSLDIR: FSLDIR -anatomical_preproc: - - brain_extraction: - - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 510e3a7ef9..cead7b1e6d 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -68,15 +68,6 @@ pipeline_setup: # If you have specified an FSL path in your .bashrc file, this path will be set automatically. FSLDIR: FSLDIR -anatomical_preproc: - - brain_extraction: - - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index e6940dc729..0720df0f81 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -68,15 +68,6 @@ pipeline_setup: # If you have specified an FSL path in your .bashrc file, this path will be set automatically. FSLDIR: FSLDIR -anatomical_preproc: - - brain_extraction: - - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 0e9e8f34c5..bee92fa7a2 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index 3326d3427f..a3bbb13861 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -68,15 +68,6 @@ pipeline_setup: # If you have specified an FSL path in your .bashrc file, this path will be set automatically. FSLDIR: FSLDIR -anatomical_preproc: - - brain_extraction: - - FSL-BET: - - # Mask created along with skull stripping. 
It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index 8f4faae6c1..70f347d406 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index 8f4faae6c1..70f347d406 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index 8f4faae6c1..70f347d406 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index 8f4faae6c1..70f347d406 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index 8f4faae6c1..70f347d406 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -76,11 +76,6 @@ anatomical_preproc: # this is a fork option using: [BET] - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. 
- mask_boolean: Off - segmentation: tissue_segmentation: diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index b6feb9c42c..f7dafde5cf 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -68,15 +68,6 @@ pipeline_setup: # If you have specified an FSL path in your .bashrc file, this path will be set automatically. FSLDIR: FSLDIR -anatomical_preproc: - - brain_extraction: - - FSL-BET: - - # Mask created along with skull stripping. It should be `On`, if selected functionalMasking : ['Anatomical_Refined'] and `FSL` as skull-stripping method. - mask_boolean: Off - segmentation: tissue_segmentation: From 20572de56d66664bb718e0919766eabfeb2c6f36 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Tue, 16 Apr 2024 11:18:52 -0400 Subject: [PATCH 014/507] Change reg workflow to produce consistent outputs --- CPAC/registration/registration.py | 72 ++++++++++--------------------- 1 file changed, 23 insertions(+), 49 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8658aae219..9373a1f7a5 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1421,15 +1421,9 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, if opt == 'FSL': - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg( - f'anat_mni_fnirt_register{symm}' - ) - else: - fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( - f'anat_mni_fnirt_register{symm}' - ) + fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( + f'anat_mni_fnirt_register{symm}' + ) wf.connect(inputNode, 'input_brain', fnirt_reg_anat_mni, 'inputspec.input_brain') @@ -1454,33 +1448,22 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'fnirt_config', fnirt_reg_anat_mni, 'inputspec.fnirt_config') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - # NOTE: this is an UPDATE because of the opt block above - added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm') - } - outputs.update(added_outputs) - else: - # NOTE: this is an UPDATE because of the opt block above - added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_brain'), - f'space-{sym}template_desc-head_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_head'), - f'space-{sym}template_desc-{orig}_mask': ( - fnirt_reg_anat_mni, 'outputspec.output_mask'), - f'space-{sym}template_desc-T1wT2w_biasfield': ( - fnirt_reg_anat_mni, 'outputspec.output_biasfield'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') - } - outputs.update(added_outputs) + # NOTE: this is an UPDATE because of the opt block above + added_outputs = { + f'space-{sym}template_desc-preproc_{orig}': ( + fnirt_reg_anat_mni, 
'outputspec.output_brain'), + f'space-{sym}template_desc-head_{orig}': ( + fnirt_reg_anat_mni, 'outputspec.output_head'), + f'space-{sym}template_desc-{orig}_mask': ( + fnirt_reg_anat_mni, 'outputspec.output_mask'), + f'space-{sym}template_desc-T1wT2w_biasfield': ( + fnirt_reg_anat_mni, 'outputspec.output_biasfield'), + f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( + fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), + f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( + fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') + } + outputs.update(added_outputs) return (wf, outputs) @@ -2030,20 +2013,11 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): node, out = connect wf.connect(node, out, fsl, 'inputspec.input_brain') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') - - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') - else: - node, out = strat_pool.get_data('FNIRT-T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data('T1w-brain-template') + wf.connect(node, out, fsl, 'inputspec.reference_brain') - node, out = strat_pool.get_data('FNIRT-T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data('T1w-template') + wf.connect(node, out, fsl, 'inputspec.reference_head') node, out = strat_pool.get_data(["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"]) From 9f81479a3b233d2fbd8a7fb02aabeac83e83db16 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Wed, 17 Apr 2024 10:35:04 -0400 Subject: [PATCH 015/507] Remove restore-brain option from pipeline config --- CPAC/pipeline/schema.py | 1 - CPAC/registration/registration.py | 10 ++-------- CPAC/resources/configs/pipeline_config_blank.yml | 4 ---- CPAC/resources/configs/pipeline_config_default.yml | 4 ---- 4 files changed, 2 insertions(+), 17 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 60fd1d9d77..9f903f1cad 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -641,7 +641,6 @@ def sanitize(filename): 'functional_registration': { 'coregistration': { 'run': bool1_1, - 'reference': In({'brain', 'restore-brain'}), 'interpolation': In({'trilinear', 'sinc', 'spline'}), 'using': str, 'input': str, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9373a1f7a5..2d0ecaf926 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2913,7 +2913,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ), ( "desc-preproc_T1w", - "desc-restore-brain_T1w", + ["desc-restore-brain_T1w", "desc-preproc_T1w"], "desc-preproc_T2w", "desc-preproc_T2w", "T2w", @@ -2979,13 +2979,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('sbref') wf.connect(node, out, func_to_anat, 'inputspec.func') - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'brain': - # TODO: use JSON meta-data to confirm - node, out = strat_pool.get_data('desc-preproc_T1w') - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'restore-brain': - node, out = 
strat_pool.get_data('desc-restore-brain_T1w') + node, out = strat_pool.get_data(['desc-restore-brain_T1w', 'desc-preproc_T1w']) wf.connect(node, out, func_to_anat, 'inputspec.anat') if diff_complete: diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index f4c049b1b5..67e1277c79 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -740,10 +740,6 @@ registration_workflows: # It is not necessary to change this path unless you intend to use non-standard MNI registration. bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: brain - # Choose FSL or ABCD as coregistration method using: FSL diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 71609ce58d..cb0bca639c 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -759,10 +759,6 @@ registration_workflows: run: On - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: brain - # Choose FSL or ABCD as coregistration method using: FSL From 68e3c41fa21ddc05e023d8d5ab1625622a65936d Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Wed, 17 Apr 2024 13:04:25 -0400 Subject: [PATCH 016/507] Modified warp_timeseries_to_T1template_abcd to create space-template_res-bold_desc-brain_T1w if needed --- CPAC/registration/registration.py | 88 ++++++++++++++++++++++++------- CPAC/surface/surf_preproc.py | 1 - 2 files changed, 70 insertions(+), 19 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8658aae219..9dc45c0eea 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1348,7 +1348,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w"): - + outputs={} wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -3766,8 +3766,14 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", + 'space-template_desc-preproc_T1w', + 'T1w-template-funcreg', + 'space-template_desc-T1w_mask', + 'desc-preproc_bold', ], outputs={ + "space-template_res-bold_desc-brain_T1w": { + "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3816,8 +3822,38 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_func_to_standard_warp, 'reference') + else: + anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_in_standard_{pipe_num}') + anat_brain_to_func_res.inputs.interp = 'spline' + 
anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + node, out = strat_pool.get_data('space-template_desc-preproc_T1w') + wf.connect(node, out, anat_brain_to_func_res, 'in_file') + + node, out = strat_pool.get_data('T1w-template-funcreg') + wf.connect(node, out, anat_brain_to_func_res, 'ref_file') + + wf.connect(anat_brain_to_func_res, 'out_file', + convert_func_to_standard_warp, 'reference') + # Create brain masks in this space from the FreeSurfer output (changing resolution) + # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz + anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_mask_in_standard_{pipe_num}') + + anat_brain_mask_to_func_res.inputs.interp = 'nn' + anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + node, out = strat_pool.get_data('space-template_desc-T1w_mask') + wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') + + wf.connect(anat_brain_to_func_res, 'out_file', + anat_brain_mask_to_func_res, 'ref_file') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3876,8 +3912,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_registration_warp, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3909,9 +3948,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, - applywarp_func_to_standard, 'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_func_to_standard, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3927,9 +3968,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, - applywarp_func_mask_to_standard, 
'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_func_mask_to_standard, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! ### @@ -3969,8 +4012,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_dc_warp, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -3988,8 +4034,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_scout, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -3998,9 +4047,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') - + if strat_pool.check_rpool('space-template_desc-bold_mask'): + node, out = strat_pool.get_data('space-template_desc-bold_mask') + wf.connect(node, out, merge_func_mask, 'in1') + else: + wf.connect(anat_brain_mask_to_func_res, 'out_file', merge_func_mask, 'in1') wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), @@ -4029,6 +4080,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_scout_brain, 'operand_files') outputs = { + 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 2b0cb64741..806fbe486c 100755 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -573,7 +573,6 @@ def run_surface(post_freesurfer_folder, "hemi-R_space-native_white", ], ) - def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node(util.Function(input_names=['post_freesurfer_folder', From 03a503c0370fa9915b9b841b0bd96e7eda3aa95b Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Wed, 17 Apr 2024 14:30:42 -0400 Subject: [PATCH 017/507] Remove coregistration -> reference option --- CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ---- 
CPAC/resources/configs/pipeline_config_abcd-prep.yml | 4 ---- 2 files changed, 8 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 10ea6bece3..ef3dcaad18 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -207,10 +207,6 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Selected_Functional_Volume] - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: restore-brain - # Choose coregistration interpolation interpolation: spline diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 7aee4e80ad..c34a4138d0 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -168,10 +168,6 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Selected_Functional_Volume] - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: restore-brain - # Choose coregistration interpolation interpolation: spline From fa8d481bb32909ba3c4cdd067dda6a2b86d395ed Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Wed, 17 Apr 2024 17:56:09 -0400 Subject: [PATCH 018/507] added bold_mask to the list of nodeblock outputs --- CPAC/registration/registration.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9dc45c0eea..e67d7ac3c0 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3774,6 +3774,8 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, outputs={ "space-template_res-bold_desc-brain_T1w": { "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-bold_mask": { + "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3840,6 +3842,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(anat_brain_to_func_res, 'out_file', convert_func_to_standard_warp, 'reference') + # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), @@ -4081,6 +4084,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None outputs = { 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), + 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') From d68a7333c0dbec67f8d256c42907a75d4c81b1a9 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Apr 2024 11:45:31 -0400 Subject: [PATCH 019/507] Revert "added bold_mask to the list of nodeblock outputs" This reverts commit 
fa8d481bb32909ba3c4cdd067dda6a2b86d395ed. --- CPAC/registration/registration.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index e67d7ac3c0..9dc45c0eea 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3774,8 +3774,6 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, outputs={ "space-template_res-bold_desc-brain_T1w": { "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3842,7 +3840,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(anat_brain_to_func_res, 'out_file', convert_func_to_standard_warp, 'reference') - # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), @@ -4084,7 +4081,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None outputs = { 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), - 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') From 4239a5f14340c2d878b2af643e604d7eac5a547e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Apr 2024 11:46:00 -0400 Subject: [PATCH 020/507] Revert "Modified warp_timeseries_to_T1template_abcd to create space-template_res-bold_desc-brain_T1w if needed" This reverts commit 68e3c41fa21ddc05e023d8d5ab1625622a65936d. 
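For context, the reverted change guarded each consumer of the template-space
reference on whether the resource already existed in the pool, falling back to a
locally resampled anatomical brain otherwise. A minimal sketch of that pattern,
using only the `strat_pool.check_rpool`/`strat_pool.get_data` and `wf.connect`
calls that appear in the hunks below; `connect_reference` and `fallback_node`
are hypothetical names for illustration, not part of the C-PAC API:

    def connect_reference(wf, strat_pool, consumer, fallback_node):
        """Prefer a pooled resource; otherwise use the local resampling node."""
        key = 'space-template_res-bold_desc-brain_T1w'
        if strat_pool.check_rpool(key):
            # A prior nodeblock already produced the reference image.
            node, out = strat_pool.get_data(key)
            wf.connect(node, out, consumer, 'ref_file')
        else:
            # Fall back to the anatomical brain resampled to BOLD resolution
            # (e.g. the anat_brain_to_func_res node removed in this revert).
            wf.connect(fallback_node, 'out_file', consumer, 'ref_file')

Later commits in this series reintroduce the same idea by factoring the
resampling into reusable subworkflows instead of repeating the branch at every
connection site.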
--- CPAC/registration/registration.py | 88 +++++++------------------------ CPAC/surface/surf_preproc.py | 1 + 2 files changed, 19 insertions(+), 70 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9dc45c0eea..8658aae219 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1348,7 +1348,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w"): - outputs={} + wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -3766,14 +3766,8 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", - 'space-template_desc-preproc_T1w', - 'T1w-template-funcreg', - 'space-template_desc-T1w_mask', - 'desc-preproc_bold', ], outputs={ - "space-template_res-bold_desc-brain_T1w": { - "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3822,38 +3816,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') - else: - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') - anat_brain_to_func_res.inputs.interp = 'spline' - anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'in_file') - - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'ref_file') - - wf.connect(anat_brain_to_func_res, 'out_file', - convert_func_to_standard_warp, 'reference') - # Create brain masks in this space from the FreeSurfer output (changing resolution) - # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') - - wf.connect(anat_brain_to_func_res, 'out_file', - anat_brain_mask_to_func_res, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_func_to_standard_warp, 'reference') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3912,11 +3876,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp 
= True convert_registration_warp.inputs.relwarp = True - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', convert_registration_warp, 'reference') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3948,11 +3909,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_func_to_standard, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_to_standard, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, + applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3968,11 +3927,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_func_mask_to_standard, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_mask_to_standard, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, + applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! 
### @@ -4012,11 +3969,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', convert_dc_warp, 'reference') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -4034,11 +3988,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_scout, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -4047,11 +3998,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - if strat_pool.check_rpool('space-template_desc-bold_mask'): - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') - else: - wf.connect(anat_brain_mask_to_func_res, 'out_file', merge_func_mask, 'in1') + node, out = strat_pool.get_data('space-template_desc-bold_mask') + wf.connect(node, out, merge_func_mask, 'in1') + wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), @@ -4080,7 +4029,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_scout_brain, 'operand_files') outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 806fbe486c..2b0cb64741 100755 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -573,6 +573,7 @@ def run_surface(post_freesurfer_folder, "hemi-R_space-native_white", ], ) + def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node(util.Function(input_names=['post_freesurfer_folder', From 72f24bc7dda5f912d2aded64979920991573a394 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Thu, 18 Apr 2024 11:59:35 -0400 Subject: [PATCH 021/507] :bug: Fix bug in fs ingress commandline --- CPAC/pipeline/engine.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 42662ec3a0..09be47823a 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1761,11 +1761,13 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - if not 
cfg.pipeline_setup['freesurfer_dir']:
+    try:
+        fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
+    except KeyError:
         print('No FreeSurfer data present.')
         return rpool
 
-    fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
+    #fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
     if not os.path.exists(fs_path):
         if 'sub' in part_id:
             fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', ''))

From 4818a203b5d7d648a8ee1c9794a05a3f1f3f1707 Mon Sep 17 00:00:00 2001
From: Biraj Shrestha
Date: Thu, 18 Apr 2024 13:23:30 -0400
Subject: [PATCH 022/507] appending the nodeblock bold_mask_anatomical_resampled
 inside warp_timeseries_to_T1template_abcd if resource
 space-template_res-bold_desc-brain_T1w is missing

---
 CPAC/registration/registration.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index 8658aae219..f57ed89f43 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -3783,6 +3783,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None
     # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548
     # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz
 
+
+    if not strat_pool.check_rpool("space-template_res-bold_desc-brain_T1w"):
+        outputs = {}
+    else:
+        wf, outputs = bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt)
+
     convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(),
                         name=f'convert_func_to_anat_linear_warp_{pipe_num}')
 
@@ -4028,11 +4034,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None
     wf.connect(merge_func_mask, 'out',
                extract_scout_brain, 'operand_files')
 
-    outputs = {
+    outputs.update({
         'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'),
         'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'),
         'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file')
-    }
+    })
 
     return (wf, outputs)

From 4476aad2c30c1f0efd66b381fa20efe0e03acdc3 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Wed, 17 Apr 2024 23:28:31 -0400
Subject: [PATCH 023/507] :construction: WIP :recycle: Launch from Python CLI
 [run reg-suite]

---
 .github/workflows/regression_test_lite.yml | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml
index 3cc63595ea..fcf786503f 100644
--- a/.github/workflows/regression_test_lite.yml
+++ b/.github/workflows/regression_test_lite.yml
@@ -79,12 +79,9 @@ jobs:
         script: |
           set -x
           cd ${{ env.SSH_WORK_DIR }}
-          if [ -d C-PAC_slurm_testing ] ; then
-            rm -rf C-PAC_slurm_testing
-          fi
-          git clone --branch ${{ env.SLURM_TESTING_BRANCH }} --depth 1 https://github.com/${{ env.SLURM_TESTING_REPO }} C-PAC_slurm_testing
+          pip install "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip"
           mkdir -p ./logs/${{ github.sha }}
-          sbatch --export="COMPARISON_PATH=${{ env.COMPARISON_PATH }},DASHBOARD_REPO=${{ env.DASHBOARD_REPO}},HOME_DIR=${{ env.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ env.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),TESTING_REPO=${{ env.SLURM_TESTING_REPO }},SHA=${{ github.sha }},TOKEN_FILE=${{
env.TOKEN_FILE }}" --output=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log C-PAC_slurm_testing/.github/scripts/launch_regtest_lite.SLURM
+          sbatch cpac-slurm-status launch --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" --comparison-path="${{ env.COMPARISON_PATH }}" --dashboard-repo="${{ env.DASHBOARD_REPO}}" --home-dir="${{ env.SSH_WORK_DIR }}" --image="${{ env.DOCKER_TAG }}" --owner="${{ github.repository_owner }}" --path-extra="${{ env.GH_CLI_BIN_PATH }}" --repo="${{ github.repository }}" --testing-repo="${{ env.SLURM_TESTING_REPO }}" --sha="${{ github.sha }}" --token-file="${{ env.TOKEN_FILE }}"
 
       - name: Cleanup SSH
         run: |

From c493e80334cb04b66fdd5c4f01dfc66b232c0be1 Mon Sep 17 00:00:00 2001
From: Biraj Shrestha
Date: Tue, 23 Apr 2024 00:21:25 -0400
Subject: [PATCH 024/507] Abstracted anat_brain_to_bold_res and
 anat_brain_mask_to_bold_res from bold_mask_anatomical_resampled as two
 separate sub workflows that can be reused

---
 CPAC/func_preproc/func_preproc.py | 80 ++++++++++++++++++++++---------
 CPAC/registration/registration.py | 52 +++++++++++---------
 2 files changed, 87 insertions(+), 45 deletions(-)

diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index e54d879c82..7dc2687763 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -1292,6 +1292,52 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None):
 
     return (wf, outputs)
 
+def anat_brain_to_bold_res(wf_name, cfg, pipe_num):
+
+    wf = pe.Workflow(name=f"{wf_name}_{pipe_num}")
+
+    inputNode = pe.Node(util.IdentityInterface(fields=['T1w-template-funcreg',
+                                                       'space-template_desc-preproc_T1w']),
+                        name='inputspec')
+    outputNode = pe.Node(util.IdentityInterface(fields=['space-template_res-bold_desc-brain_T1w']),
+                         name='outputspec')
+
+    # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution}
+    anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(),
+                                     name=f'resample_anat_brain_in_standard_{pipe_num}')
+
+    anat_brain_to_func_res.inputs.interp = 'spline'
+    anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[
+        'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix']
+
+    wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_to_func_res, 'in_file')
+    wf.connect(inputNode, 'T1w-template-funcreg', anat_brain_to_func_res, 'ref_file')
+
+    wf.connect(anat_brain_to_func_res, 'out_file', outputNode, 'space-template_res-bold_desc-brain_T1w')
+    return wf
+
+def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num):
+    # Create brain masks in this space from the FreeSurfer output (changing resolution)
+    # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz
+    wf = pe.Workflow(name=f"{wf_name}_{pipe_num}")
+    inputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-T1w_mask',
+                                                       'space-template_desc-preproc_T1w']),
+                        name='inputspec')
+    outputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-bold_mask']),
+                         name='outputspec')
+
+    anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(),
+                                          name=f'resample_anat_brain_mask_in_standard_{pipe_num}')
+
+    anat_brain_mask_to_func_res.inputs.interp = 'nn'
+    anat_brain_mask_to_func_res.inputs.premat =
cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + wf.connect(inputNode, 'space-template_desc-T1w_mask', anat_brain_mask_to_func_res, 'in_file') + wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_mask_to_func_res, 'ref_file') + wf.connect(anat_brain_mask_to_func_res, 'out_file', outputNode, 'space-template_desc-bold_mask') + + return wf @nodeblock( name='bold_mask_anatomical_resampled', @@ -1308,34 +1354,23 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. ''' - # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') - - anat_brain_to_func_res.inputs.interp = 'spline' - anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'in_file') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'ref_file') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) + node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') + wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') - wf.connect(anat_brain_to_func_res, 'out_file', - anat_brain_mask_to_func_res, 'ref_file') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', + anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') # Resample func mask in template space back to native space func_mask_template_to_native = pe.Node( @@ -1346,21 +1381,20 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): func_mask_template_to_native.inputs.resample_mode = 'NN' func_mask_template_to_native.inputs.outputtype = 'NIFTI_GZ' - wf.connect(anat_brain_mask_to_func_res, 'out_file', + wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', func_mask_template_to_native, 'in_file') node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, func_mask_template_to_native, 'master') outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), - 
'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), + 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w'), + 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask'), 'space-bold_desc-brain_mask': (func_mask_template_to_native, 'out_file') } return (wf, outputs) - @nodeblock( name='bold_mask_ccs', switch=[['functional_preproc', 'run'], diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index f57ed89f43..fbd386b4a7 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3766,6 +3766,9 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", + "T1w-template-funcreg", + "space-template_desc-preproc_T1w", + "space-template_desc-T1w_mask", ], outputs={ "space-template_desc-preproc_bold": { @@ -3783,12 +3786,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz - - if not strat_pool.check_rpool("space-template_res-bold_desc-brain_T1w"): - outputs = {} - else: - wf, outputs = bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt) - convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), name=f'convert_func_to_anat_linear_warp_{pipe_num}') @@ -3822,8 +3819,17 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') + + from CPAC.func_preproc.func_preproc import anat_brain_to_bold_res, anat_brain_mask_to_bold_res + anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) + + node, out = strat_pool.get_data('space-template_desc-preproc_T1w') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + + node, out = strat_pool.get_data('T1w-template-funcreg') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_func_to_standard_warp, 'reference') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3882,8 +3888,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3915,8 +3920,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', 
applywarp_func_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz @@ -3933,8 +3937,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! ### @@ -3975,8 +3978,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -3994,8 +3996,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'ref_file') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -4004,8 +4005,15 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) + + node, out = strat_pool.get_data('space-template_desc-T1w_mask') + wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', + anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + + wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', merge_func_mask, 'in1') wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') @@ -4034,11 +4042,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_func_mask, 'out', extract_scout_brain, 'operand_files') - outputs.update({ + outputs = { 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') - }) + } return (wf, outputs) From e7e625339b17361f1fe83ea07f13cc07ec92b131 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 26 Apr 2024 00:47:05 -0400 Subject: [PATCH 025/507] :construction_worker: Upgrade 
SLURM testing package if already installed [run reg-suite] --- .github/workflows/regression_test_lite.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index fcf786503f..7b721f8e4c 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -33,6 +33,7 @@ jobs: DOCKER_TAG: GH_CLI_BIN_PATH: ${{ secrets.GH_CLI_BIN_PATH }} SLURM_TESTING_BRANCH: ${{ vars.SLURM_TESTING_BRANCH }} + SLURM_TESTING_PACKAGE: ${{ vars.SLURM_TESTING_PACKAGE }} SLURM_TESTING_REPO: ${{ vars.SLURM_TESTING_REPO }} SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} SSH_HOST: ${{ secrets.SSH_HOST }} @@ -79,7 +80,14 @@ jobs: script: | set -x cd ${{ env.SSH_WORK_DIR }} - pip install "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" + if pip show "${{ env.SLURM_TESTING_PACKAGE }}" > /dev/null 2>&1; then + # If the package is installed, upgrade it + python3 -m pip install --user --upgrade "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" + else + # If the package is not installed, install it + python3 -m pip install --user --upgrade "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" + fi + pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" mkdir -p ./logs/${{ github.sha }} sbatch cpac-slurm-status launch --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" --comparison-path="${{ env.COMPARISON_PATH }}" --dashboard-repo="${{ env.DASHBOARD_REPO}}" --home-dir="${{ env.SSH_WORK_DIR }}" --image="${{ env.DOCKER_TAG }}" --owner="${{ github.repository_owner }}" --path-extra="${{ env.GH_CLI_BIN_PATH }}" --repo="${{ github.repository }}" --testing-repo="${{ env.SLURM_TESTING_REPO }}" --sha="${{ github.sha }}" --token-file="${{ env.TOKEN_FILE }}" From b97d9271ca3a8a9875daebaf3e6e6a0a0c700a89 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 26 Apr 2024 10:19:42 -0400 Subject: [PATCH 026/507] :wrench: Update launch argument --- .github/workflows/regression_test_lite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 7b721f8e4c..e3ca23de90 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -89,7 +89,7 @@ jobs: fi pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" mkdir -p ./logs/${{ github.sha }} - sbatch cpac-slurm-status launch --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" --comparison-path="${{ env.COMPARISON_PATH }}" --dashboard-repo="${{ env.DASHBOARD_REPO}}" --home-dir="${{ env.SSH_WORK_DIR }}" --image="${{ env.DOCKER_TAG }}" --owner="${{ github.repository_owner }}" --path-extra="${{ env.GH_CLI_BIN_PATH }}" --repo="${{ github.repository }}" --testing-repo="${{ env.SLURM_TESTING_REPO }}" --sha="${{ github.sha }}" --token-file="${{ env.TOKEN_FILE }}" + sbatch cpac-slurm-status launch --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" --comparison-path="${{ env.COMPARISON_PATH }}" --dashboard-repo="${{ env.DASHBOARD_REPO}}" --home-dir="${{ env.SSH_WORK_DIR }}" --image="${{ env.DOCKER_TAG }}" --owner="${{ github.repository_owner }}" --path-extra="${{ env.GH_CLI_BIN_PATH }}" --repo="${{ github.repository }}" --slurm-testing-repo="${{ 
env.SLURM_TESTING_REPO }}" --sha="${{ github.sha }}" --token-file="${{ env.TOKEN_FILE }}" - name: Cleanup SSH run: | From c58adae349cae8570c36faca20398aacf1459375 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 26 Apr 2024 11:56:14 -0400 Subject: [PATCH 027/507] :construction_worker: Add `slurm-testing-branch` to launch params [run reg-suite] --- .github/workflows/regression_test_lite.yml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index e3ca23de90..92771a488d 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -85,11 +85,22 @@ jobs: python3 -m pip install --user --upgrade "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" else # If the package is not installed, install it - python3 -m pip install --user --upgrade "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" + python3 -m pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" fi - pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" mkdir -p ./logs/${{ github.sha }} - sbatch cpac-slurm-status launch --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" --comparison-path="${{ env.COMPARISON_PATH }}" --dashboard-repo="${{ env.DASHBOARD_REPO}}" --home-dir="${{ env.SSH_WORK_DIR }}" --image="${{ env.DOCKER_TAG }}" --owner="${{ github.repository_owner }}" --path-extra="${{ env.GH_CLI_BIN_PATH }}" --repo="${{ github.repository }}" --slurm-testing-repo="${{ env.SLURM_TESTING_REPO }}" --sha="${{ github.sha }}" --token-file="${{ env.TOKEN_FILE }}" + sbatch cpac-slurm-status launch \ + --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" \ + --comparison-path="${{ env.COMPARISON_PATH }}" \ + --dashboard-repo="${{ env.DASHBOARD_REPO}}" \ + --home-dir="${{ env.SSH_WORK_DIR }}" \ + --image="${{ env.DOCKER_TAG }}" \ + --owner="${{ github.repository_owner }}" \ + --path-extra="${{ env.GH_CLI_BIN_PATH }}" \ + --repo="${{ github.repository }}" \ + --sha="${{ github.sha }}" \ + --slurm-testing-branch="${{ env.SLURM_TESTING_BRANCH }}" \ + --slurm-testing-repo="${{ env.SLURM_TESTING_REPO }}" \ + --token-file="${{ env.TOKEN_FILE }}" - name: Cleanup SSH run: | From a2f8d231df540750bdc0b7fc79c138fbb94fece7 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 10:08:07 -0400 Subject: [PATCH 028/507] added template space warped mask as an output in the ANTs Registration nodeblock --- CPAC/registration/registration.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9833c7ee24..0ab7dfee64 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1181,7 +1181,8 @@ def create_wf_calculate_ants_warp( 'inverse_warp_field', 'composite_transform', 'wait', - 'normalized_output_brain']), name='outputspec') + 'normalized_output_brain', + 'normalized_output_brain_mask']), name='outputspec') # use ANTS to warp the masked anatomical image to a template image ''' @@ -1204,7 +1205,8 @@ def create_wf_calculate_ants_warp( 'interp', 'reg_with_skull'], output_names=['warp_list', - 'warped_image'], + 'warped_image', + 'warped_mask'], function=hardcoded_reg, imports=reg_imports), name='calc_ants_warp', @@ -1343,6 +1345,9 @@ def 
create_wf_calculate_ants_warp( calculate_ants_warp, 'warped_image', outputspec, 'normalized_output_brain') + calc_ants_warp_wf.connect( + calculate_ants_warp, 'warped_mask', + outputspec, 'normalized_output_brain_mask') return calc_ants_warp_wf @@ -1785,6 +1790,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", outputs = { f'space-{sym}template_desc-preproc_{orig}': ( ants_reg_anat_mni, 'outputspec.normalized_output_brain'), + f'space-{sym}template_desc-{orig}_mask': ( + ants_reg_anat_mni, 'outputspec.normalized_output_brain_mask'), f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( write_composite_xfm, 'output_image'), f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( @@ -2223,6 +2230,7 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "template space.", "Template": "T1w-template", }, + "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " "to T1w-template space.", From 5ca3a1b1a8e1344418b183ec431e972b16dafb2e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 10:37:29 -0400 Subject: [PATCH 029/507] Edited ANTs registration to output the template mask file as well --- CPAC/registration/registration.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 0ab7dfee64..4bf84927d8 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1181,8 +1181,7 @@ def create_wf_calculate_ants_warp( 'inverse_warp_field', 'composite_transform', 'wait', - 'normalized_output_brain', - 'normalized_output_brain_mask']), name='outputspec') + 'normalized_output_brain']), name='outputspec') # use ANTS to warp the masked anatomical image to a template image ''' @@ -1205,8 +1204,7 @@ def create_wf_calculate_ants_warp( 'interp', 'reg_with_skull'], output_names=['warp_list', - 'warped_image', - 'warped_mask'], + 'warped_image'], function=hardcoded_reg, imports=reg_imports), name='calc_ants_warp', @@ -1345,9 +1343,6 @@ def create_wf_calculate_ants_warp( calculate_ants_warp, 'warped_image', outputspec, 'normalized_output_brain') - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warped_mask', - outputspec, 'normalized_output_brain_mask') return calc_ants_warp_wf @@ -1790,8 +1785,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", outputs = { f'space-{sym}template_desc-preproc_{orig}': ( ants_reg_anat_mni, 'outputspec.normalized_output_brain'), - f'space-{sym}template_desc-{orig}_mask': ( - ants_reg_anat_mni, 'outputspec.normalized_output_brain_mask'), f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( write_composite_xfm, 'output_image'), f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( @@ -2355,6 +2348,22 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs[new_key] = outputs[key] del outputs[key] + ants_apply_warp_t1_brain_mask_to_template = pe.Node(interface=ants.ApplyTransforms(), + name=f'ANTS-ABCD_T1_to_template_{pipe_num}') + ants_apply_warp_t1_brain_mask_to_template.inputs.dimension = 3 + ants_apply_warp_t1_brain_mask_to_template.inputs.print_out_composite_warp_file = True + ants_apply_warp_t1_brain_mask_to_template.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' + + node, out = strat_pool.get_data(['space-T1w_desc-brain_mask']) + wf.connect(node, out, 
ants_apply_warp_t1_brain_mask_to_template, 'input_image') + + node, out = strat_pool.get_data('T1w-template') + wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'reference_image') + + _, out = outputs['from-T1w_to-template_mode-image_xfm'] + wf.connect(_, out, ants_apply_warp_t1_brain_mask_to_template, 'transforms') + outputs.update({'space-template_desc-T1w_mask': (ants_apply_warp_t1_brain_mask_to_template, 'output_image')}) + return (wf, outputs) From cc285fdf19c40b3e455f0feacfcc70473e88693e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 12:04:16 -0400 Subject: [PATCH 030/507] Revert "added template space warped mask as an output in the ANTs Registration nodeblock" This reverts commit a2f8d231df540750bdc0b7fc79c138fbb94fece7. --- CPAC/registration/registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 4bf84927d8..415c0a5ddc 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2223,7 +2223,6 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "template space.", "Template": "T1w-template", }, - "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " "to T1w-template space.", From 66894a40f375b2ec0d4c7cb7ce33b488f559bf3b Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 15:23:17 -0400 Subject: [PATCH 031/507] changed bold_mask_anatomical_resampled to take in brain_mask instead of T1w mask --- CPAC/func_preproc/func_preproc.py | 4 ++-- CPAC/registration/registration.py | 21 ++------------------- 2 files changed, 4 insertions(+), 21 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 7dc2687763..b2f9e49ae1 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1346,7 +1346,7 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): option_key=['functional_preproc', 'func_masking', 'using'], option_val='Anatomical_Resampled', inputs=['desc-preproc_bold', 'T1w-template-funcreg', 'space-template_desc-preproc_T1w', - 'space-template_desc-T1w_mask'], + 'space-template_desc-brain_mask'], outputs=['space-template_res-bold_desc-brain_T1w', 'space-template_desc-bold_mask', 'space-bold_desc-brain_mask'] ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): @@ -1366,7 +1366,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - node, out = strat_pool.get_data('space-template_desc-T1w_mask') + node, out = strat_pool.get_data('space-template_desc-brain_mask') wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 415c0a5ddc..a0a1604d7c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2346,23 +2346,6 @@ def register_ANTs_anat_to_template(wf, cfg, 
strat_pool, pipe_num, opt=None): f'{direction}-longitudinal') outputs[new_key] = outputs[key] del outputs[key] - - ants_apply_warp_t1_brain_mask_to_template = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_T1_to_template_{pipe_num}') - ants_apply_warp_t1_brain_mask_to_template.inputs.dimension = 3 - ants_apply_warp_t1_brain_mask_to_template.inputs.print_out_composite_warp_file = True - ants_apply_warp_t1_brain_mask_to_template.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' - - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask']) - wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'input_image') - - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'reference_image') - - _, out = outputs['from-T1w_to-template_mode-image_xfm'] - wf.connect(_, out, ants_apply_warp_t1_brain_mask_to_template, 'transforms') - outputs.update({'space-template_desc-T1w_mask': (ants_apply_warp_t1_brain_mask_to_template, 'output_image')}) - return (wf, outputs) @@ -3752,7 +3735,7 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "T1w-brain-template-funcreg", "T1w-template-funcreg", "space-template_desc-preproc_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", ], outputs={ "space-template_desc-preproc_bold": { @@ -3991,7 +3974,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - node, out = strat_pool.get_data('space-template_desc-T1w_mask') + node, out = strat_pool.get_data('space-template_desc-brain_mask') wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', From dc41bf4f94da07dd78aeaf2fb894e11999f34748 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Mon, 13 May 2024 18:06:52 -0400 Subject: [PATCH 032/507] input bold data to node swapped with reorient-bold to resolve RAI/RPI bold/mask mismatch issue --- CPAC/registration/registration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a0a1604d7c..5a957bda0d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3723,7 +3723,7 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, option_key=["apply_transform", "using"], option_val="abcd", inputs=[ - ("desc-preproc_bold", "bold", "motion-basefile", + ("desc-preproc_bold", "desc-reorient_bold", "motion-basefile", "coordinate-transformation"), "from-T1w_to-template_mode-image_xfm", "from-bold_to-T1w_mode-image_desc-linear_xfm", @@ -3807,7 +3807,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data('bold') + node, out = strat_pool.get_data('desc-reorient_bold') wf.connect(node, out, extract_func_roi, 'in_file') # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp @@ -3826,7 +3826,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None split_func.inputs.dimension = 't' - node, out = strat_pool.get_data('bold') + node, out = strat_pool.get_data('desc-reorient_bold') wf.connect(node, out, split_func, 'in_file') ### Loop starts! 
### From 355671aa25e4d7fde2b101d91581d645a3f35e20 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 24 May 2024 21:23:53 -0400 Subject: [PATCH 033/507] :bug: Make sure `use_ants` is defined before passed as a param --- CPAC/nuisance/nuisance.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 3e3385f73e..361f820130 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2456,6 +2456,8 @@ def nuisance_regressors_generation( reg_tool = check_prov_for_regtool(xfm_prov) if reg_tool is not None: use_ants = reg_tool == "ants" + else: + use_ants = False if cfg.switch_is_on( [ "functional_preproc", From c0bf5e8ae26dec4846f0606cd99aa7817138a8c2 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 10 Jun 2024 14:28:34 -0400 Subject: [PATCH 034/507] :necktie: :construction_worker: Force reinstall testing package --- .github/workflows/regression_test_lite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 92771a488d..76e54b61f3 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -82,7 +82,7 @@ jobs: cd ${{ env.SSH_WORK_DIR }} if pip show "${{ env.SLURM_TESTING_PACKAGE }}" > /dev/null 2>&1; then # If the package is installed, upgrade it - python3 -m pip install --user --upgrade "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" + python3 -m pip install --user --upgrade --force-reinstall "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" else # If the package is not installed, install it python3 -m pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" From 42f717fd6371b86d6ec36121edb83b9ef2b322b6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 3 Jul 2024 17:00:02 -0400 Subject: [PATCH 035/507] :goal_net: Handle unexpected existing working directory \& :rotating_lights: Lint [run reg-suite] --- CPAC/pipeline/cpac_runner.py | 112 +++++++++++++++++------------------ CPAC/utils/io.py | 37 ++++++++++++ 2 files changed, 92 insertions(+), 57 deletions(-) create mode 100644 CPAC/utils/io.py diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 0110281d5d..854aa57075 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Run C-PAC pipeline as configured.""" + from multiprocessing import Process import os from time import strftime @@ -26,20 +28,20 @@ from CPAC.utils.configuration import check_pname, Configuration, set_subject from CPAC.utils.configuration.yaml_template import upgrade_pipeline_to_1_8 from CPAC.utils.ga import track_run -from CPAC.utils.monitoring import failed_to_start, log_nodes_cb, WFLOGGER +from CPAC.utils.io import load_yaml +from CPAC.utils.monitoring import failed_to_start, FMLOGGER, log_nodes_cb, WFLOGGER + +RECOMMENDED_MAX_PATH_LENGTH: int = 70 +"""Recommended maximum length for a working directory path.""" -# Run condor jobs def run_condor_jobs(c, config_file, subject_list_file, p_name): + """Run condor jobs.""" # Import packages import subprocess from time import strftime - try: - sublist = yaml.safe_load(open(os.path.realpath(subject_list_file), "r")) - except: - msg = "Subject list is not in proper YAML format. 
Please check your file" - raise Exception(msg) + sublist = load_yaml(subject_list_file, "Subject list") cluster_files_dir = os.path.join(os.getcwd(), "cluster_files") subject_bash_file = os.path.join( @@ -100,9 +102,9 @@ def run_condor_jobs(c, config_file, subject_list_file, p_name): # Create and run script for CPAC to run on cluster def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): - """ - Function to build a SLURM batch job submission script and - submit it to the scheduler via 'sbatch'. + """Build a SLURM batch job submission script. + + Submit it to the scheduler via 'sbatch'. """ # Import packages import getpass @@ -113,18 +115,11 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): from indi_schedulers import cluster_templates # Load in pipeline config - try: - pipeline_dict = yaml.safe_load(open(os.path.realpath(config_file), "r")) - pipeline_config = Configuration(pipeline_dict) - except: - msg = "Pipeline config is not in proper YAML format. Please check your file" - raise Exception(msg) + pipeline_dict = load_yaml(config_file, "Pipeline config") + pipeline_config = Configuration(pipeline_dict) + # Load in the subject list - try: - sublist = yaml.safe_load(open(os.path.realpath(subject_list_file), "r")) - except: - msg = "Subject list is not in proper YAML format. Please check your file" - raise Exception(msg) + sublist = load_yaml(subject_list_file, "Subject list") # Init variables timestamp = str(strftime("%Y_%m_%d_%H_%M_%S")) @@ -238,6 +233,7 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): def run_T1w_longitudinal(sublist, cfg): + """Run anatomical longitudinal pipeline.""" subject_id_dict = {} for sub in sublist: @@ -260,7 +256,7 @@ def run_T1w_longitudinal(sublist, cfg): ) -def run( +def run( # noqa: PLR0915 subject_list_file, config_file=None, p_name=None, @@ -322,22 +318,21 @@ def run( config_file = os.path.realpath(config_file) try: if not os.path.exists(config_file): - raise IOError - else: + raise FileNotFoundError(config_file) + try: + c = Configuration(load_yaml(config_file, "Pipeline configuration")) + except Invalid: try: - c = Configuration(yaml.safe_load(open(config_file, "r"))) - except Invalid: - try: - upgrade_pipeline_to_1_8(config_file) - c = Configuration(yaml.safe_load(open(config_file, "r"))) - except Exception as e: - msg = ( - "C-PAC could not upgrade pipeline configuration file " - f"{config_file} to v1.8 syntax" - ) - raise RuntimeError(msg) from e + upgrade_pipeline_to_1_8(config_file) + c = Configuration(load_yaml(config_file, "Pipeline configuration")) except Exception as e: - raise e + msg = ( + "C-PAC could not upgrade pipeline configuration file " + f"{config_file} to v1.8 syntax" + ) + raise RuntimeError(msg) from e + except Exception as e: + raise e except IOError as e: msg = f"config file {config_file} doesn't exist" raise FileNotFoundError(msg) from e @@ -385,10 +380,10 @@ def run( msg = "Working directory not specified" raise Exception(msg) - if len(c.pipeline_setup["working_directory"]["path"]) > 70: + if len(c.pipeline_setup["working_directory"]["path"]) > RECOMMENDED_MAX_PATH_LENGTH: warnings.warn( "We recommend that the working directory full path " - "should have less then 70 characters. " + f"should have less then {RECOMMENDED_MAX_PATH_LENGTH} characters. " "Long paths might not work in your operating system." 
) warnings.warn( @@ -400,12 +395,8 @@ def run( p_name = check_pname(p_name, c) # Load in subject list - try: - if not sublist: - sublist = yaml.safe_load(open(subject_list_file, "r")) - except: - msg = "Subject list is not in proper YAML format. Please check your file" - raise FileNotFoundError(msg) + if not sublist: + sublist = load_yaml(subject_list_file, "Subject list") # Populate subject scan map sub_scan_map = {} @@ -418,12 +409,12 @@ def run( scan_ids = ["scan_anat"] if "func" in sub: - for id in sub["func"]: - scan_ids.append("scan_" + str(id)) + for _id in sub["func"]: + scan_ids.append("scan_" + str(_id)) if "rest" in sub: - for id in sub["rest"]: - scan_ids.append("scan_" + str(id)) + for _id in sub["rest"]: + scan_ids.append("scan_" + str(_id)) sub_scan_map[s] = scan_ids except Exception as e: @@ -444,8 +435,10 @@ def run( level="participant" if not test_config else "test", participants=len(sublist), ) - except: - WFLOGGER.error("Usage tracking failed for this run.") + except Exception as exception: + WFLOGGER.error( + "Usage tracking failed for this run.\nDetails: %s", exception + ) # If we're running on cluster, execute job scheduler if c.pipeline_setup["system_config"]["on_grid"]["run"]: @@ -471,15 +464,20 @@ def run( # Create working dir if not os.path.exists(c.pipeline_setup["working_directory"]["path"]): try: - os.makedirs(c.pipeline_setup["working_directory"]["path"]) - except: + os.makedirs( + c.pipeline_setup["working_directory"]["path"], exist_ok=True + ) + except FileExistsError: + FMLOGGER.warn( + f"Path exists: {c['pipeline_setup', 'working_directory', 'path']}" + ) + except Exception as exception: err = ( - "\n\n[!] CPAC says: Could not create the working " - "directory: %s\n\nMake sure you have permissions " - "to write to this directory.\n\n" - % c.pipeline_setup["working_directory"]["path"] + "\n\n[!] CPAC says: Could not create the working directory: " + f"{c['pipeline_setup', 'working_directory', 'path']}\n\nMake sure " + "you have permissions to write to this directory.\n\n" ) - raise Exception(err) + raise IOError(err) from exception """ if not os.path.exists(c.pipeline_setup['log_directory']['path']): try: diff --git a/CPAC/utils/io.py b/CPAC/utils/io.py new file mode 100644 index 0000000000..12d7d7f5d1 --- /dev/null +++ b/CPAC/utils/io.py @@ -0,0 +1,37 @@ +# Copyright (C) 2012-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Utilities for inputs and outputs.""" + +from pathlib import Path + +from yaml import safe_load, YAMLError + + +def load_yaml( + path: Path | str, desc: str = "YAML file", encoding="utf8" +) -> dict | list | str: + """Try to load a YAML file to a Python object.""" + path = Path(path).absolute() + try: + with path.open("r", encoding=encoding) as _yaml: + result = safe_load(_yaml) + except FileNotFoundError as error: + raise error + except Exception as error: + msg = f"{desc} is not in proper YAML format. 
Please check {path}" + raise YAMLError(msg) from error + return result From b232f64c2f6d61860f50cb4e37c67cdfe6bae5c2 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 3 Jul 2024 18:15:43 -0400 Subject: [PATCH 036/507] :bug: Fix import (probably merge error) [run reg-suite] --- CPAC/pipeline/engine.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 3c739ae1ab..d7f53f7029 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -19,13 +19,12 @@ import hashlib from itertools import chain import json -import logging import os import re from typing import Optional import warnings -from nipype import config +from nipype import config, logging from nipype.interfaces.utility import Rename from CPAC.image_utils.spatial_smoothing import spatial_smoothing From f45af766226bb901b52480458e2caa25ae60a7c7 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 5 Jul 2024 09:56:33 -0400 Subject: [PATCH 037/507] :bug: Import local variables in `get_scan_params` [run reg-suite] --- CPAC/utils/utils.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 4ba3285218..b8b84ec186 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -689,6 +689,13 @@ def get_scan_params( import os import warnings + from CPAC.utils.utils import ( + check, + fetch_and_convert, + try_fetch_parameter, + VALID_PATTERNS, + ) + def check2(val): return val if val is None or val == "" or isinstance(val, str) else int(val) From c63a1833f12dd574f1c2cf78db15446fa6803105 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 5 Jul 2024 14:26:27 -0400 Subject: [PATCH 038/507] :recycle: Dedupe function node imports --- CPAC/utils/datasource.py | 1 - CPAC/utils/utils.py | 23 +++++++++++------------ 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index d0089d8afe..008e674c2d 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -730,7 +730,6 @@ def ingress_func_metadata( "effective_echo_spacing", ], function=get_scan_params, - imports=["from CPAC.utils.utils import check, try_fetch_parameter"], ), name=f"bold_scan_params_{subject_id}{name_suffix}", ) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index b8b84ec186..b84c847515 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -33,6 +33,7 @@ from CPAC.utils.configuration import Configuration from CPAC.utils.docs import deprecated +from CPAC.utils.interfaces.function import Function from CPAC.utils.monitoring import FMLOGGER, WFLOGGER CONFIGS_DIR = os.path.abspath( @@ -648,6 +649,14 @@ def try_fetch_parameter(scan_parameters, subject, scan, keys): return None +@Function.sig_imports( + [ + "import json", + "import os", + "from CPAC.utils.utils import check, fetch_and_convert," + " try_fetch_parameter, VALID_PATTERNS", + ] +) def get_scan_params( subject_id, scan, @@ -685,16 +694,6 @@ def get_scan_params( pe_direction : str effective_echo_spacing : float """ - import json - import os - import warnings - - from CPAC.utils.utils import ( - check, - fetch_and_convert, - try_fetch_parameter, - VALID_PATTERNS, - ) def check2(val): return val if val is None or val == "" or isinstance(val, str) else int(val) @@ -875,7 +874,7 @@ def check2(val): # checking if the unit of TR and slice timing match or not # if slice timing in ms convert TR to ms as well if TR and max_slice_offset > TR: - warnings.warn( + WFLOGGER.warn( "TR is in seconds and slice timings are in " 
"milliseconds. Converting TR into milliseconds" ) @@ -885,7 +884,7 @@ def check2(val): elif TR and TR > 10: # noqa: PLR2004 # check to see, if TR is in milliseconds, convert it into seconds - warnings.warn("TR is in milliseconds, Converting it into seconds") + WFLOGGER.warn("TR is in milliseconds, Converting it into seconds") TR = TR / 1000.0 WFLOGGER.info("New TR value %s s", TR) unit = "s" From d010ac710901d53d4f775092e50f82a28ae77f35 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Sat, 6 Jul 2024 00:27:06 -0400 Subject: [PATCH 039/507] :construction: WIP :bug: Fix `get_scan_params` [run reg-suite] --- CPAC/utils/tests/test_utils.py | 77 +++++++++---- CPAC/utils/utils.py | 197 ++++++++++++++++++++++----------- 2 files changed, 184 insertions(+), 90 deletions(-) diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index f9a4cb73e4..43539d9a57 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -3,6 +3,7 @@ import multiprocessing from unittest import mock +from _pytest.logging import LogCaptureFixture import pytest from CPAC.func_preproc import get_motion_ref @@ -12,26 +13,33 @@ from CPAC.utils.utils import ( check_config_resources, check_system_deps, - try_fetch_parameter, + fetch_and_convert, ) -scan_params_bids = { - "RepetitionTime": 2.0, - "ScanOptions": "FS", - "SliceAcquisitionOrder": "Interleaved Ascending", -} -scan_params_cpac = { - "tr": 2.5, - "acquisition": "seq+z", - "reference": "24", - "first_tr": "", - "last_tr": "", +SCAN_PARAMS = { + "BIDS": { + "params": { + "RepetitionTime": 2.0, + "ScanOptions": "FS", + "SliceAcquisitionOrder": "Interleaved Ascending", + }, + "expected_TR": 2.0, + }, + "C-PAC": { + "params": { + "tr": 2.5, + "acquisition": "seq+z", + "reference": "24", + "first_tr": "", + "last_tr": "", + }, + "expected_TR": 2.5, + }, } def _installation_check(command: str, flag: str) -> None: - """Test that command is installed by running specified version or - help flag. + """Test that command is installed by running specified version or help flag. 
Parameters ---------- @@ -56,9 +64,10 @@ def _installation_check(command: str, flag: str) -> None: def test_check_config_resources(): """Test check_config_resources function.""" - with mock.patch.object(multiprocessing, "cpu_count", return_value=2), pytest.raises( - SystemError - ) as system_error: + with ( + mock.patch.object(multiprocessing, "cpu_count", return_value=2), + pytest.raises(SystemError) as system_error, + ): check_config_resources( Configuration( {"pipeline_setup": {"system_config": {"max_cores_per_participant": 10}}} @@ -69,12 +78,33 @@ def test_check_config_resources(): assert "threads available (2)" in error_string -def test_function(): - TR = try_fetch_parameter(scan_params_bids, "0001", "scan", ["TR", "RepetitionTime"]) - assert TR == 2.0 - - TR = try_fetch_parameter(scan_params_cpac, "0001", "scan", ["TR", "RepetitionTime"]) - assert TR == 2.5 +@pytest.mark.parametrize("scan_params", ["BIDS", "C-PAC"]) +@pytest.mark.parametrize("convert_to", [int, float, str]) +def test_fetch_and_convert( + caplog: LogCaptureFixture, scan_params: str, convert_to: type +) -> None: + """Test functionality to fetch and convert scan parameters.""" + params = SCAN_PARAMS[scan_params]["params"] + TR = fetch_and_convert( + scan_parameters=params, + scan="scan", + keys=["TR", "RepetitionTime"], + convert_to=convert_to, + ) + assert (TR == convert_to(SCAN_PARAMS[scan_params]["expected_TR"])) and isinstance( + TR, convert_to + ) + if scan_params == "C-PAC": + assert "Using case-insenitive match: 'TR' ≅ 'tr'." in caplog.text + else: + assert "Using case-insenitive match: 'TR' ≅ 'tr'." not in caplog.text + not_TR = fetch_and_convert( + scan_parameters=params, + scan="scan", + keys=["NotTR", "NotRepetitionTime"], + convert_to=convert_to, + ) + assert not_TR is None @pytest.mark.parametrize("executable", ["Xvfb"]) @@ -96,6 +126,7 @@ def test_NodeBlock_option_SSOT(): # pylint: disable=invalid-name def test_system_deps(): """Test system dependencies. + Raises an exception if dependencies are not met. """ check_system_deps(*([True] * 4)) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index b84c847515..47528b7f36 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -25,7 +25,7 @@ import numbers import os import pickle -from typing import Any +from typing import Any, Literal, Optional, overload import numpy as np from voluptuous.error import Invalid @@ -47,6 +47,7 @@ os.path.join(CONFIGS_DIR, "1.7-1.8-deprecations.yml"), "r", encoding="utf-8" ) as _f: NESTED_CONFIG_DEPRECATIONS = yaml.safe_load(_f) +PE_DIRECTION = Literal["i", "i-", "j", "j-", "k", "k-", ""] VALID_PATTERNS = [ "alt+z", "altplus", @@ -472,17 +473,92 @@ def compute_fisher_z_score(correlation_file, timeseries_one_d, input_name): return out_file +@overload +def fetch( + scan_parameters: dict, + scan: Optional[str] = None, + keys: Optional[list[str]] = None, + *, + match_case: Literal[False], +) -> Any: ... +@overload +def fetch( + scan_parameters: dict, + scan: Optional[str] = None, + keys: Optional[list[str]] = None, + *, + match_case: Literal[True], +) -> tuple[Any, tuple[str, str]]: ... +def fetch(scan_parameters, scan, keys, *, match_case=False): + """Fetch the first found parameter from a scan params dictionary. + + Returns + ------- + value + The value of the parameter. 
+ + keys, optional + The matched keys (only if ``match_case is True``) + """ + if match_case: + keys = {key.lower(): key for key in keys} + scan_param_keys = {key.lower(): key for key in scan_parameters.keys()} + scan_parameters = {key.lower(): value for key, value in scan_parameters.items()} + for key in keys: + if key in scan_parameters: + if match_case: + return check(scan_parameters, None, scan, key, True), ( + keys[key], + scan_param_keys[key], + ) + return check(scan_parameters, None, scan, key, True) + msg = f"None of {keys} found in {list(scan_parameters.keys())}." + raise KeyError(msg) + + def fetch_and_convert( - scan_parameters: dict, scan: str, keys: list[str], convert_to: type, fallback: Any + scan_parameters: dict, + scan: str, + keys: list[str], + convert_to: type, + fallback: Optional[Any] = None, ) -> Any: - """Fetch a parameter from a scan parameters dictionary and convert it to a given type. + """Fetch a parameter from a scan params dictionary and convert it to a given type. Catch TypeError exceptions and return a fallback value in those cases. + + Returns + ------- + value + The gathered parameter coerced to the specified type, if possible. + ``fallback`` otherwise. """ + value: Any = fallback + fallback_message = f"Falling back to {fallback} ({type(fallback)})." + + try: + raw_value = fetch(scan_parameters, scan, keys) + except KeyError: + try: + raw_value, matched_keys = fetch( + scan_parameters, scan, keys, match_case=True + ) + except KeyError: + WFLOGGER.warning( + f"None of {keys} found in {list(scan_parameters.keys())}. " + f"{fallback_message}" + ) + return fallback + WFLOGGER.warning( + f"None exact match found. Using case-insenitive match: '{matched_keys[0]}'" + f" ≅ '{matched_keys[1]}'." + ) try: - value = convert_to(scan_parameters, None, scan, keys) + value = convert_to(raw_value) except TypeError: - value = fallback + WFLOGGER.warning( + f"Could not convert {value} to {convert_to}. {fallback_message}" + ) return value @@ -625,74 +701,61 @@ def check_random_state(seed): ) -def try_fetch_parameter(scan_parameters, subject, scan, keys): - """Try to fetch a parameter from a scan parameters dictionary.""" - scan_parameters = {k.lower(): v for k, v in scan_parameters.items()} - - for _key in keys: - key = _key.lower() - - if key not in scan_parameters: - continue - - if isinstance(scan_parameters[key], dict): - value = scan_parameters[key][scan] - else: - value = scan_parameters[key] - - # Explicit none value - if value == "None": - return None - - if value is not None: - return value - return None - - @Function.sig_imports( [ "import json", "import os", - "from CPAC.utils.utils import check, fetch_and_convert," - " try_fetch_parameter, VALID_PATTERNS", + "from CPAC.utils.utils import check, fetch_and_convert," " VALID_PATTERNS", ] ) def get_scan_params( - subject_id, - scan, - pipeconfig_start_indx, - pipeconfig_stop_indx, - data_config_scan_params=None, -): + subject_id: str, + scan: str, + pipeconfig_start_indx: int, + pipeconfig_stop_indx: Optional[int | str], + data_config_scan_params: Optional[dict | str] = None, +) -> tuple[ + Optional[str], + Optional[str], + Optional[int], + Optional[int], + Optional[int], + Optional[int], + PE_DIRECTION, + Optional[float], +]: """Extract slice timing correction parameters and scan parameters. 
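The two @overload stubs above only refine the return type for type checkers; the behavior worth internalizing is the two-pass lookup. A self-contained sketch of that lookup, stripped of the subject/scan plumbing and the overloads:

    def fetch_insensitive(scan_parameters: dict, keys: list[str]):
        """Try exact keys first, then a lowercased comparison of both sides."""
        for key in keys:  # pass 1: exact match wins
            if key in scan_parameters:
                return scan_parameters[key]
        lowered = {k.lower(): k for k in scan_parameters}
        for key in keys:  # pass 2: case-insensitive fallback
            if key.lower() in lowered:
                return scan_parameters[lowered[key.lower()]]
        msg = f"None of {keys} found in {list(scan_parameters)}."
        raise KeyError(msg)

    fetch_insensitive({"tr": 2.5}, ["TR", "RepetitionTime"])  # -> 2.5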
Parameters ---------- - subject_id : str + subject_id subject id - scan : str + scan scan id - pipeconfig_start_indx : int + pipeconfig_start_indx starting volume index as provided in the pipeline config yaml file - pipeconfig_stop_indx : int + pipeconfig_stop_indx ending volume index as provided in the pipeline config yaml file - data_config_scan_params : str - file path to scan parameter JSON file listed in data config yaml file + data_config_scan_params + file path to scan parameter JSON file listed in data config yaml file or loaded + paramater dictionary Returns ------- - TR : a string + TR TR value - pattern : a string + tpattern slice aquisition pattern string or file path - ref_slice : an integer - reference slice which is used to allign all other slices - first_tr : an integer - starting TR or starting volume index - last_tr : an integer - ending TR or ending volume index - pe_direction : str - effective_echo_spacing : float + ref_slice + index of reference slice which is used to allign all other slices + first_tr + index of starting TR or starting volume index + last_tr + index of ending TR or ending volume index + pe_direction + https://bids-specification.readthedocs.io/en/stable/glossary.html#phaseencodingdirection-metadata + effective_echo_spacing + https://bids-specification.readthedocs.io/en/stable/glossary.html#effectiveechospacing-metadata """ def check2(val): @@ -700,7 +763,7 @@ def check2(val): # initialize vars to empty TR = pattern = ref_slice = first_tr = last_tr = pe_direction = "" - unit = "s" + unit: Literal["ms", "s"] = "s" effective_echo_spacing = template = None if isinstance(pipeconfig_stop_indx, str): @@ -750,24 +813,24 @@ def check2(val): # TODO: better handling of errant key values!!! # TODO: use schema validator to deal with it # get details from the configuration - TR = fetch_and_convert( + TR: Optional[float] = fetch_and_convert( params_dct, scan, ["TR", "RepetitionTime"], float, None ) - template = fetch_and_convert( + template: Optional[str] = fetch_and_convert( params_dct, scan, ["Template", "template"], str, None ) - pattern = str( - try_fetch_parameter( - params_dct, - subject_id, - scan, - ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], - ) + pattern: str = fetch_and_convert( + params_dct, + scan, + ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], + str, + "", ) - ref_slice = check(params_dct, subject_id, scan, "reference", False) - ref_slice = int(ref_slice) if ref_slice else ref_slice + ref_slice: Optional[int] = fetch_and_convert( + params_dct, scan, ["reference"], int, None + ) first_tr = check(params_dct, subject_id, scan, "first_TR", False) first_tr = check2(first_tr) if first_tr else first_tr @@ -874,7 +937,7 @@ def check2(val): # checking if the unit of TR and slice timing match or not # if slice timing in ms convert TR to ms as well if TR and max_slice_offset > TR: - WFLOGGER.warn( + WFLOGGER.warning( "TR is in seconds and slice timings are in " "milliseconds. 
Converting TR into milliseconds" ) @@ -884,7 +947,7 @@ def check2(val): elif TR and TR > 10: # noqa: PLR2004 # check to see, if TR is in milliseconds, convert it into seconds - WFLOGGER.warn("TR is in milliseconds, Converting it into seconds") + WFLOGGER.warning("TR is in milliseconds, Converting it into seconds") TR = TR / 1000.0 WFLOGGER.info("New TR value %s s", TR) unit = "s" From d8d5553b89cb422d64b7e10409c138655b99f67b Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 10:05:09 -0400 Subject: [PATCH 040/507] :recycle: DRY `fetch_and_convert`|`fetch`|`check`|`check2` [run reg-suite] --- CPAC/utils/utils.py | 191 +++++++++++++++++++++----------------------- 1 file changed, 89 insertions(+), 102 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 47528b7f36..dd20b14b43 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -476,6 +476,7 @@ def compute_fisher_z_score(correlation_file, timeseries_one_d, input_name): @overload def fetch( scan_parameters: dict, + subject: Optional[str] = None, scan: Optional[str] = None, keys: Optional[list[str]] = None, *, @@ -484,12 +485,13 @@ def fetch( @overload def fetch( scan_parameters: dict, + subject: Optional[str] = None, scan: Optional[str] = None, keys: Optional[list[str]] = None, *, match_case: Literal[True], ) -> tuple[Any, tuple[str, str]]: ... -def fetch(scan_parameters, scan, keys, *, match_case=False): +def fetch(scan_parameters, subject, scan, keys, *, match_case=False): """Fetch the first found parameter from a scan params dictionary. Returns @@ -507,26 +509,52 @@ def fetch(scan_parameters, scan, keys, *, match_case=False): for key in keys: if key in scan_parameters: if match_case: - return check(scan_parameters, None, scan, key, True), ( + return check(scan_parameters, subject, scan, key, True), ( keys[key], scan_param_keys[key], ) - return check(scan_parameters, None, scan, key, True) + return check(scan_parameters, subject, scan, key, True) msg = f"None of {keys} found in {list(scan_parameters.keys())}." raise KeyError(msg) def fetch_and_convert( scan_parameters: dict, + subject: str, scan: str, keys: list[str], convert_to: type, fallback: Optional[Any] = None, + warn_typeerror: bool = True, ) -> Any: """Fetch a parameter from a scan params dictionary and convert it to a given type. Catch TypeError exceptions and return a fallback value in those cases. + Parameters + ---------- + scan_parameters + dictionary of scan metadata + + subject + the subject ID + + scan + the scan ID + + keys + if multiple keys provided, the value corresponding to the first found will be + returned + + convert_to + the type to return if possible + + fallback + a value to return if the keys are not found in ``scan_parameters`` + + warn_typeerror + log a warning if value cannot be converted to ``convert_to`` type? + Returns ------- value @@ -537,11 +565,11 @@ def fetch_and_convert( fallback_message = f"Falling back to {fallback} ({type(fallback)})." try: - raw_value = fetch(scan_parameters, scan, keys) + raw_value = fetch(scan_parameters, subject, scan, keys) except KeyError: try: raw_value, matched_keys = fetch( - scan_parameters, scan, keys, match_case=True + scan_parameters, subject, scan, keys, match_case=True ) except KeyError: WFLOGGER.warning( @@ -556,9 +584,10 @@ def fetch_and_convert( try: value = convert_to(raw_value) except TypeError: - WFLOGGER.warning( - f"Could not convert {value} to {convert_to}. 
{fallback_message}" - ) + if warn_typeerror: + WFLOGGER.warning( + f"Could not convert {value} to {convert_to}. {fallback_message}" + ) return value @@ -705,13 +734,14 @@ def check_random_state(seed): [ "import json", "import os", - "from CPAC.utils.utils import check, fetch_and_convert," " VALID_PATTERNS", + "from typing import Literal, Optional", + "from CPAC.utils.utils import fetch_and_convert, PE_DIRECTION, VALID_PATTERNS", ] ) def get_scan_params( subject_id: str, scan: str, - pipeconfig_start_indx: int, + pipeconfig_start_indx: Optional[int | str], pipeconfig_stop_indx: Optional[int | str], data_config_scan_params: Optional[dict | str] = None, ) -> tuple[ @@ -742,7 +772,7 @@ def get_scan_params( Returns ------- - TR + tr TR value tpattern slice aquisition pattern string or file path @@ -757,12 +787,8 @@ def get_scan_params( effective_echo_spacing https://bids-specification.readthedocs.io/en/stable/glossary.html#effectiveechospacing-metadata """ - - def check2(val): - return val if val is None or val == "" or isinstance(val, str) else int(val) - # initialize vars to empty - TR = pattern = ref_slice = first_tr = last_tr = pe_direction = "" + tr = pattern = ref_slice = first_tr = last_tr = pe_direction = "" unit: Literal["ms", "s"] = "s" effective_echo_spacing = template = None @@ -777,78 +803,10 @@ def check2(val): f" configuration file does not exist:\n{data_config_scan_params}" ) raise FileNotFoundError(err) - with open(data_config_scan_params, "r") as f: - params_dct = json.load(f) - - # get details from the configuration - # if this is a JSON file, the key values are the BIDS format - # standard - # TODO: better handling of errant key values!!! - if "RepetitionTime" in params_dct.keys(): - TR = float(check(params_dct, subject_id, scan, "RepetitionTime", False)) - if "SliceTiming" in params_dct.keys(): - pattern = str(check(params_dct, subject_id, scan, "SliceTiming", False)) - elif "SliceAcquisitionOrder" in params_dct.keys(): - pattern = str( - check(params_dct, subject_id, scan, "SliceAcquisitionOrder", False) - ) - if "PhaseEncodingDirection" in params_dct.keys(): - pe_direction = str( - check(params_dct, subject_id, scan, "PhaseEncodingDirection", False) - ) - try: - "EffectiveEchoSpacing" in params_dct.keys() - effective_echo_spacing = float( - check(params_dct, subject_id, scan, "EffectiveEchoSpacing", False) - ) - except TypeError: - pass - - elif len(data_config_scan_params) > 0 and isinstance( - data_config_scan_params, dict - ): + params_dct: dict = json.load(f) + elif isinstance(data_config_scan_params, dict): params_dct = data_config_scan_params - - # TODO: better handling of errant key values!!! 
- # TODO: use schema validator to deal with it - # get details from the configuration - TR: Optional[float] = fetch_and_convert( - params_dct, scan, ["TR", "RepetitionTime"], float, None - ) - template: Optional[str] = fetch_and_convert( - params_dct, scan, ["Template", "template"], str, None - ) - - pattern: str = fetch_and_convert( - params_dct, - scan, - ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], - str, - "", - ) - - ref_slice: Optional[int] = fetch_and_convert( - params_dct, scan, ["reference"], int, None - ) - - first_tr = check(params_dct, subject_id, scan, "first_TR", False) - first_tr = check2(first_tr) if first_tr else first_tr - - last_tr = check(params_dct, subject_id, scan, "last_TR", False) - last_tr = check2(last_tr) if last_tr else last_tr - - pe_direction = check( - params_dct, subject_id, scan, "PhaseEncodingDirection", False - ) - effective_echo_spacing = fetch_and_convert( - params_dct, - scan, - ["EffectiveEchoSpacing"], - float, - effective_echo_spacing, - ) - else: err = ( "\n\n[!] Could not read the format of the scan parameters " @@ -856,9 +814,38 @@ def check2(val): f"the participant {subject_id}.\n\n" ) raise OSError(err) - first_tr = pipeconfig_start_indx if first_tr == "" or first_tr is None else first_tr - last_tr = pipeconfig_stop_indx if last_tr == "" or last_tr is None else last_tr - pattern = None if "None" in pattern or "none" in pattern else pattern + # TODO: better handling of errant key values!!! + # TODO: use schema validator to deal with it + # get details from the configuration + tr: float | Literal[""] = fetch_and_convert( + params_dct, subject_id, scan, ["RepetitionTime", "TR"], float, "" + ) + template: Optional[str] = fetch_and_convert( + params_dct, subject_id, scan, ["Template", "template"], str + ) + pattern: Optional[str] = fetch_and_convert( + params_dct, + subject_id, + scan, + ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], + str, + None, + ) + ref_slice: Optional[int | str] = fetch_and_convert( + params_dct, subject_id, scan, ["reference"], int, None + ) + first_tr: Optional[int | str] = fetch_and_convert( + params_dct, subject_id, scan, ["first_TR"], int, pipeconfig_start_indx + ) + last_tr: Optional[int | str] = fetch_and_convert( + params_dct, subject_id, scan, ["last_TR"], int, pipeconfig_stop_indx + ) + pe_direction: PE_DIRECTION = fetch_and_convert( + params_dct, subject_id, scan, ["PhaseEncodingDirection"], str, "" + ) + effective_echo_spacing: Optional[float] = fetch_and_convert( + params_dct, subject_id, scan, ["EffectiveEchoSpacing"], float + ) """ if not pattern: @@ -934,26 +921,26 @@ def check2(val): slice_timings.sort() max_slice_offset = slice_timings[-1] - # checking if the unit of TR and slice timing match or not - # if slice timing in ms convert TR to ms as well - if TR and max_slice_offset > TR: + # checking if the unit of tr and slice timing match or not + # if slice timing in ms convert tr to ms as well + if tr and max_slice_offset > tr: WFLOGGER.warning( - "TR is in seconds and slice timings are in " - "milliseconds. Converting TR into milliseconds" + "tr is in seconds and slice timings are in " + "milliseconds. 
Converting tr into milliseconds" ) - TR = TR * 1000 - WFLOGGER.info("New TR value %s ms", TR) + tr = tr * 1000 + WFLOGGER.info("New tr value %s ms", tr) unit = "ms" - elif TR and TR > 10: # noqa: PLR2004 - # check to see, if TR is in milliseconds, convert it into seconds - WFLOGGER.warning("TR is in milliseconds, Converting it into seconds") - TR = TR / 1000.0 - WFLOGGER.info("New TR value %s s", TR) + elif tr and tr > 10: # noqa: PLR2004 + # check to see, if tr is in milliseconds, convert it into seconds + WFLOGGER.warning("tr is in milliseconds, Converting it into seconds") + tr = tr / 1000.0 + WFLOGGER.info("New tr value %s s", tr) unit = "s" # swap back in - tr = f"{TR!s}{unit}" if TR else "" + tr = f"{tr!s}{unit}" if tr else "" tpattern = pattern start_indx = first_tr stop_indx = last_tr From 7bcd2a0747cf1e62afe72c7d876b20ee80264208 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 10:49:11 -0400 Subject: [PATCH 041/507] :bug: Tell Nipype to import typehint type [run reg-suite] --- CPAC/alff/utils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CPAC/alff/utils.py b/CPAC/alff/utils.py index f89e0c8ca4..d7532373bf 100644 --- a/CPAC/alff/utils.py +++ b/CPAC/alff/utils.py @@ -3,7 +3,10 @@ from pathlib import Path +from CPAC.utils.interfaces.function import Function + +@Function.sig_imports(["from pathlib import Path"]) def get_opt_string(mask: Path | str) -> str: """ Return option string for 3dTstat. From 9d1b0a8aeef5d9849b4d92d4d981345bea67c8ef Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 12:01:02 -0400 Subject: [PATCH 042/507] :recycle: DRY params, sub, scan --- CPAC/utils/tests/test_utils.py | 12 +- CPAC/utils/utils.py | 341 ++++++++++++++++----------------- 2 files changed, 167 insertions(+), 186 deletions(-) diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 43539d9a57..750b883758 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -13,7 +13,7 @@ from CPAC.utils.utils import ( check_config_resources, check_system_deps, - fetch_and_convert, + ScanParameters, ) SCAN_PARAMS = { @@ -84,10 +84,8 @@ def test_fetch_and_convert( caplog: LogCaptureFixture, scan_params: str, convert_to: type ) -> None: """Test functionality to fetch and convert scan parameters.""" - params = SCAN_PARAMS[scan_params]["params"] - TR = fetch_and_convert( - scan_parameters=params, - scan="scan", + params = ScanParameters(SCAN_PARAMS[scan_params]["params"], "subject", "scan") + TR = params.fetch_and_convert( keys=["TR", "RepetitionTime"], convert_to=convert_to, ) @@ -98,9 +96,7 @@ def test_fetch_and_convert( assert "Using case-insenitive match: 'TR' ≅ 'tr'." in caplog.text else: assert "Using case-insenitive match: 'TR' ≅ 'tr'." not in caplog.text - not_TR = fetch_and_convert( - scan_parameters=params, - scan="scan", + not_TR = params.fetch_and_convert( keys=["NotTR", "NotRepetitionTime"], convert_to=convert_to, ) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index dd20b14b43..814303f249 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -473,122 +473,161 @@ def compute_fisher_z_score(correlation_file, timeseries_one_d, input_name): return out_file -@overload -def fetch( - scan_parameters: dict, - subject: Optional[str] = None, - scan: Optional[str] = None, - keys: Optional[list[str]] = None, - *, - match_case: Literal[False], -) -> Any: ... 
-@overload -def fetch( - scan_parameters: dict, - subject: Optional[str] = None, - scan: Optional[str] = None, - keys: Optional[list[str]] = None, - *, - match_case: Literal[True], -) -> tuple[Any, tuple[str, str]]: ... -def fetch(scan_parameters, subject, scan, keys, *, match_case=False): - """Fetch the first found parameter from a scan params dictionary. +class ScanParameters: + """A dictionary of scan parameters and access methods.""" + + def __init__(self, scan_parameters: str | dict, subject_id: str, scan: str): + """Initialize ScanParameters dict and metadata.""" + self.subject = subject_id + self.scan = scan + if ".json" in scan_parameters: + if not os.path.exists(scan_parameters): + err = ( + "\n[!] WARNING: Scan parameters JSON file listed in your data" + f" configuration file does not exist:\n{scan_parameters}" + ) + raise FileNotFoundError(err) + with open(scan_parameters, "r") as f: + self.params: dict = json.load(f) + elif isinstance(scan_parameters, dict): + self.params = scan_parameters + else: + err = ( + "\n\n[!] Could not read the format of the scan parameters " + "information included in the data configuration file for " + f"the participant {self.subject}.\n\n" + ) + raise OSError(err) - Returns - ------- - value - The value of the parameter. + def check(self, val_to_check: str, throw_exception: bool): + """Check that a value is populated for a given key in a parameters dictionary.""" + if val_to_check not in self.params: + if throw_exception: + msg = f"Missing Value for {val_to_check} for participant {self.subject}" + raise ValueError(msg) + return None - keys, optional - The matched keys (only if ``match_case is True``) - """ - if match_case: - keys = {key.lower(): key for key in keys} - scan_param_keys = {key.lower(): key for key in scan_parameters.keys()} - scan_parameters = {key.lower(): value for key, value in scan_parameters.items()} - for key in keys: - if key in scan_parameters: - if match_case: - return check(scan_parameters, subject, scan, key, True), ( - keys[key], - scan_param_keys[key], + if isinstance(self.params[val_to_check], dict): + ret_val = self.params[val_to_check][self.scan] + else: + ret_val = self.params[val_to_check] + + if ret_val == "None": + if throw_exception: + msg = ( + f"'None' parameter value for {val_to_check} for" + f" participant {self.subject}." ) - return check(scan_parameters, subject, scan, key, True) - msg = f"None of {keys} found in {list(scan_parameters.keys())}." - raise KeyError(msg) + raise ValueError(msg) + ret_val = None + if ret_val == "" and throw_exception: + msg = f"Missing value for {val_to_check} for participant {self.subject}." + raise ValueError(msg) -def fetch_and_convert( - scan_parameters: dict, - subject: str, - scan: str, - keys: list[str], - convert_to: type, - fallback: Optional[Any] = None, - warn_typeerror: bool = True, -) -> Any: - """Fetch a parameter from a scan params dictionary and convert it to a given type. + return ret_val + + @overload + def fetch( + self, + keys: Optional[list[str]] = None, + *, + match_case: Literal[False], + ) -> Any: ... + @overload + def fetch( + self, + keys: Optional[list[str]] = None, + *, + match_case: Literal[True], + ) -> tuple[Any, tuple[str, str]]: ... + def fetch(self, keys, *, match_case=False): + """Fetch the first found parameter from a scan params dictionary. - Catch TypeError exceptions and return a fallback value in those cases. + Returns + ------- + value + The value of the parameter. 
- Parameters - ---------- - scan_parameters - dictionary of scan metadata + keys, optional + The matched keys (only if ``match_case is True``) + """ + if match_case: + keys = {key.lower(): key for key in keys} + scan_param_keys = {key.lower(): key for key in self.params.keys()} + scan_parameters = {key.lower(): value for key, value in self.params.items()} + else: + scan_parameters = self.params + for key in keys: + if key in scan_parameters: + if match_case: + return self.check(key, True), ( + keys[key], + scan_param_keys[key], + ) + return self.check(key, True) + msg = f"None of {keys} found in {list(scan_parameters.keys())}." + raise KeyError(msg) - subject - the subject ID + def fetch_and_convert( + self, + keys: list[str], + convert_to: Optional[type] = None, + fallback: Optional[Any] = None, + warn_typeerror: bool = True, + ) -> Any: + """Fetch a parameter from a scan params dictionary and convert it to a given type. - scan - the scan ID + Catch TypeError exceptions and return a fallback value in those cases. - keys - if multiple keys provided, the value corresponding to the first found will be - returned + Parameters + ---------- + keys + if multiple keys provided, the value corresponding to the first found will be + returned - convert_to - the type to return if possible + convert_to + the type to return if possible - fallback - a value to return if the keys are not found in ``scan_parameters`` + fallback + a value to return if the keys are not found in ``scan_parameters`` - warn_typeerror - log a warning if value cannot be converted to ``convert_to`` type? + warn_typeerror + log a warning if value cannot be converted to ``convert_to`` type? - Returns - ------- - value - The gathered parameter coerced to the specified type, if possible. - ``fallback`` otherwise. - """ - value: Any = fallback - fallback_message = f"Falling back to {fallback} ({type(fallback)})." + Returns + ------- + value + The gathered parameter coerced to the specified type, if possible. + ``fallback`` otherwise. + """ + value: Any = fallback + fallback_message = f"Falling back to {fallback} ({type(fallback)})." - try: - raw_value = fetch(scan_parameters, subject, scan, keys) - except KeyError: try: - raw_value, matched_keys = fetch( - scan_parameters, subject, scan, keys, match_case=True - ) + raw_value = self.fetch(keys) except KeyError: + try: + raw_value, matched_keys = self.fetch(keys, match_case=True) + except KeyError: + WFLOGGER.warning( + f"None of {keys} found in {list(self.params.keys())}. " + f"{fallback_message}" + ) + return fallback WFLOGGER.warning( - f"None of {keys} found in {list(scan_parameters.keys())}. " - f"{fallback_message}" - ) - return fallback - WFLOGGER.warning( - f"None exact match found. Using case-insenitive match: '{matched_keys[0]}'" - f" ≅ '{matched_keys[1]}'." - ) - try: - value = convert_to(raw_value) - except TypeError: - if warn_typeerror: - WFLOGGER.warning( - f"Could not convert {value} to {convert_to}. {fallback_message}" + f"None exact match found. Using case-insenitive match: '{matched_keys[0]}'" + f" ≅ '{matched_keys[1]}'." ) - return value + if convert_to: + try: + value = convert_to(raw_value) + except TypeError: + if warn_typeerror: + WFLOGGER.warning( + f"Could not convert {value} to {convert_to}. 
{fallback_message}" + ) + return value def get_operand_string(mean, std_dev): @@ -676,35 +715,6 @@ def correlation(matrix1, matrix2, match_rows=False, z_scored=False, symmetric=Fa return r -def check(params_dct, subject_id, scan_id, val_to_check, throw_exception): - """Check that a value is populated for a given key in a parameters dictionary.""" - if val_to_check not in params_dct: - if throw_exception: - msg = f"Missing Value for {val_to_check} for participant {subject_id}" - raise ValueError(msg) - return None - - if isinstance(params_dct[val_to_check], dict): - ret_val = params_dct[val_to_check][scan_id] - else: - ret_val = params_dct[val_to_check] - - if ret_val == "None": - if throw_exception: - msg = ( - f"'None' Parameter Value for {val_to_check} for" - f" participant {subject_id}" - ) - raise ValueError(msg) - ret_val = None - - if ret_val == "" and throw_exception: - msg = f"Missing Value for {val_to_check} for participant {subject_id}" - raise ValueError(msg) - - return ret_val - - def check_random_state(seed): """ Turn seed into a np.random.RandomState instance. @@ -735,7 +745,7 @@ def check_random_state(seed): "import json", "import os", "from typing import Literal, Optional", - "from CPAC.utils.utils import fetch_and_convert, PE_DIRECTION, VALID_PATTERNS", + "from CPAC.utils.utils import ScanParameters, PE_DIRECTION, VALID_PATTERNS", ] ) def get_scan_params( @@ -795,57 +805,32 @@ def get_scan_params( if isinstance(pipeconfig_stop_indx, str): if "End" in pipeconfig_stop_indx or "end" in pipeconfig_stop_indx: pipeconfig_stop_indx = None - if data_config_scan_params: - if ".json" in data_config_scan_params: - if not os.path.exists(data_config_scan_params): - err = ( - "\n[!] WARNING: Scan parameters JSON file listed in your data" - f" configuration file does not exist:\n{data_config_scan_params}" - ) - raise FileNotFoundError(err) - with open(data_config_scan_params, "r") as f: - params_dct: dict = json.load(f) - elif isinstance(data_config_scan_params, dict): - params_dct = data_config_scan_params - else: - err = ( - "\n\n[!] Could not read the format of the scan parameters " - "information included in the data configuration file for " - f"the participant {subject_id}.\n\n" - ) - raise OSError(err) - # TODO: better handling of errant key values!!! - # TODO: use schema validator to deal with it - # get details from the configuration - tr: float | Literal[""] = fetch_and_convert( - params_dct, subject_id, scan, ["RepetitionTime", "TR"], float, "" - ) - template: Optional[str] = fetch_and_convert( - params_dct, subject_id, scan, ["Template", "template"], str - ) - pattern: Optional[str] = fetch_and_convert( - params_dct, - subject_id, - scan, - ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], - str, - None, - ) - ref_slice: Optional[int | str] = fetch_and_convert( - params_dct, subject_id, scan, ["reference"], int, None - ) - first_tr: Optional[int | str] = fetch_and_convert( - params_dct, subject_id, scan, ["first_TR"], int, pipeconfig_start_indx - ) - last_tr: Optional[int | str] = fetch_and_convert( - params_dct, subject_id, scan, ["last_TR"], int, pipeconfig_stop_indx - ) - pe_direction: PE_DIRECTION = fetch_and_convert( - params_dct, subject_id, scan, ["PhaseEncodingDirection"], str, "" - ) - effective_echo_spacing: Optional[float] = fetch_and_convert( - params_dct, subject_id, scan, ["EffectiveEchoSpacing"], float - ) + params = ScanParameters(data_config_scan_params, subject_id, scan) + # TODO: better handling of errant key values!!! 
+ # TODO: use schema validator to deal with it + # get details from the configuration + tr: float | Literal[""] = params.fetch_and_convert( + ["RepetitionTime", "TR"], float, "" + ) + template: Optional[str] = params.fetch_and_convert(["Template", "template"], str) + pattern: Optional[str] = params.fetch_and_convert( + ["acquisition", "SliceTiming", "SliceAcquisitionOrder"], + str, + None, + ) + ref_slice: Optional[int | str] = params.fetch_and_convert(["reference"], int, None) + first_tr: Optional[int | str] = params.fetch_and_convert( + ["first_TR"], int, pipeconfig_start_indx + ) + last_tr: Optional[int | str] = params.fetch_and_convert( + ["last_TR"], int, pipeconfig_stop_indx + ) + pe_direction: PE_DIRECTION = params.fetch_and_convert( + ["PhaseEncodingDirection"], str, "" + ) + effective_echo_spacing: Optional[float] = params.fetch_and_convert( + ["EffectiveEchoSpacing"], float + ) """ if not pattern: From e431ac061e7781d86aece406427810596f6519f8 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 12:54:48 -0400 Subject: [PATCH 043/507] :white_check_mark: Add tests for `fetch` refactor [rebuild base-lite] [rebuild base-standard] [run reg-suite] --- .ruff.toml | 1 + CPAC/utils/tests/old_functions.py | 67 +++++++++++++++++++++++++++++++ CPAC/utils/tests/test_utils.py | 52 +++++++++++++++++++++--- CPAC/utils/utils.py | 27 +++++++++---- 4 files changed, 134 insertions(+), 13 deletions(-) create mode 100644 CPAC/utils/tests/old_functions.py diff --git a/.ruff.toml b/.ruff.toml index d690751b02..265427a1ab 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -11,6 +11,7 @@ external = ["T20"] # Don't autoremove 'noqa` comments for these rules [lint.per-file-ignores] "CPAC/func_preproc/func_preproc.py" = ["E402"] "CPAC/utils/sklearn.py" = ["RUF003"] +"CPAC/utils/tests/old_functions.py" = ["C", "D", "E", "EM", "PLW", "RET"] "CPAC/utils/utils.py" = ["T201"] # until `repickle` is removed "setup.py" = ["D1"] diff --git a/CPAC/utils/tests/old_functions.py b/CPAC/utils/tests/old_functions.py new file mode 100644 index 0000000000..80171db77b --- /dev/null +++ b/CPAC/utils/tests/old_functions.py @@ -0,0 +1,67 @@ +# Copyright (C) 2012-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
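Freezing the pre-refactor implementations in CPAC/utils/tests/old_functions.py (defined just below) lets the new accessors be checked against the legacy behavior directly. A minimal sketch of that comparison style; the real tests above are parametrized over several fixtures:

    from CPAC.utils.tests import old_functions
    from CPAC.utils.utils import ScanParameters

    def test_tr_matches_legacy():
        """New accessor should agree with the archived implementation."""
        metadata = {"RepetitionTime": 2.0}
        keys = ["TR", "RepetitionTime"]
        new = ScanParameters(metadata, "sub-01", "scan-1").fetch_and_convert(
            keys, float
        )
        old = old_functions.try_fetch_parameter(metadata, "sub-01", "scan-1", keys)
        assert new == float(old) == 2.0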
+"""Functions from before refactoring.""" + + +def check(params_dct, subject_id, scan_id, val_to_check, throw_exception): + """https://github.com/FCP-INDI/C-PAC/blob/96db8b0b65ab1d5f55fb3b895855af34d72c17e4/CPAC/utils/utils.py#L630-L653""" + if val_to_check not in params_dct: + if throw_exception: + raise Exception( + f"Missing Value for {val_to_check} for participant " f"{subject_id}" + ) + return None + if isinstance(params_dct[val_to_check], dict): + ret_val = params_dct[val_to_check][scan_id] + else: + ret_val = params_dct[val_to_check] + if ret_val == "None": + if throw_exception: + raise Exception( + f"'None' Parameter Value for {val_to_check} for participant " + f"{subject_id}" + ) + else: + ret_val = None + if ret_val == "" and throw_exception: + raise Exception( + f"Missing Value for {val_to_check} for participant " f"{subject_id}" + ) + return ret_val + + +def check2(val): + """https://github.com/FCP-INDI/C-PAC/blob/96db8b0b65ab1d5f55fb3b895855af34d72c17e4/CPAC/utils/utils.py#L745-L746""" + return val if val == None or val == "" or isinstance(val, str) else int(val) + + +def try_fetch_parameter(scan_parameters, subject, scan, keys): + """https://github.com/FCP-INDI/C-PAC/blob/96db8b0b65ab1d5f55fb3b895855af34d72c17e4/CPAC/utils/utils.py#L679-L703""" + scan_parameters = dict((k.lower(), v) for k, v in scan_parameters.items()) + for key in keys: + key = key.lower() + if key not in scan_parameters: + continue + if isinstance(scan_parameters[key], dict): + value = scan_parameters[key][scan] + else: + value = scan_parameters[key] + if value == "None": + return None + if value is not None: + return value + return None diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 750b883758..ab896c6029 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -10,6 +10,7 @@ from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.utils.configuration import Configuration from CPAC.utils.monitoring.custom_logging import log_subprocess +from CPAC.utils.tests import old_functions from CPAC.utils.utils import ( check_config_resources, check_system_deps, @@ -30,11 +31,19 @@ "tr": 2.5, "acquisition": "seq+z", "reference": "24", - "first_tr": "", - "last_tr": "", + "first_TR": 1, + "last_TR": "", }, "expected_TR": 2.5, }, + "nested": { + "params": { + "TR": {"scan": 3}, + "first_TR": {"scan": 0}, + "last_TR": {"scan": 450}, + }, + "expected_TR": 3, + }, } @@ -78,7 +87,7 @@ def test_check_config_resources(): assert "threads available (2)" in error_string -@pytest.mark.parametrize("scan_params", ["BIDS", "C-PAC"]) +@pytest.mark.parametrize("scan_params", ["BIDS", "C-PAC", "nested"]) @pytest.mark.parametrize("convert_to", [int, float, str]) def test_fetch_and_convert( caplog: LogCaptureFixture, scan_params: str, convert_to: type @@ -89,8 +98,25 @@ def test_fetch_and_convert( keys=["TR", "RepetitionTime"], convert_to=convert_to, ) - assert (TR == convert_to(SCAN_PARAMS[scan_params]["expected_TR"])) and isinstance( - TR, convert_to + if TR and "RepetitionTime" in params.params: + old_TR = convert_to( + old_functions.check( + params.params, params.subject, params.scan, "RepetitionTime", False + ) + ) + assert TR == old_TR + try: + old_TR = convert_to( + old_functions.try_fetch_parameter( + params.params, params.subject, params.scan, ["TR", "RepetitionTime"] + ) + ) + except TypeError: + old_TR = None + assert ( + (TR == convert_to(SCAN_PARAMS[scan_params]["expected_TR"])) + and isinstance(TR, convert_to) + and TR == old_TR ) if scan_params == 
"C-PAC": assert "Using case-insenitive match: 'TR' ≅ 'tr'." in caplog.text @@ -101,6 +127,22 @@ def test_fetch_and_convert( convert_to=convert_to, ) assert not_TR is None + if "first_TR" in params.params: + first_tr = params.fetch_and_convert(["first_TR"], int, 1, False) + old_first_tr = old_functions.check( + params.params, params.subject, params.scan, "first_TR", False + ) + if old_first_tr: + old_first_tr = old_functions.check2(old_first_tr) + assert first_tr == old_first_tr + if "last_TR" in params.params: + last_tr = params.fetch_and_convert(["last_TR"], int, "", False) + old_last_tr = old_functions.check( + params.params, params.subject, params.scan, "last_TR", False + ) + if old_last_tr: + old_last_tr = old_functions.check2(old_last_tr) + assert last_tr == old_last_tr @pytest.mark.parametrize("executable", ["Xvfb"]) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 814303f249..29201d779d 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -533,6 +533,7 @@ def fetch( keys: Optional[list[str]] = None, *, match_case: Literal[False], + throw_exception: bool, ) -> Any: ... @overload def fetch( @@ -540,8 +541,9 @@ def fetch( keys: Optional[list[str]] = None, *, match_case: Literal[True], + throw_exception: bool, ) -> tuple[Any, tuple[str, str]]: ... - def fetch(self, keys, *, match_case=False): + def fetch(self, keys, *, match_case=False, throw_exception=True): """Fetch the first found parameter from a scan params dictionary. Returns @@ -551,6 +553,9 @@ def fetch(self, keys, *, match_case=False): keys, optional The matched keys (only if ``match_case is True``) + + throw_exception + Raise an exception if value is ``""`` or ``None``? """ if match_case: keys = {key.lower(): key for key in keys} @@ -561,11 +566,11 @@ def fetch(self, keys, *, match_case=False): for key in keys: if key in scan_parameters: if match_case: - return self.check(key, True), ( + return self.check(key, throw_exception), ( keys[key], scan_param_keys[key], ) - return self.check(key, True) + return self.check(key, throw_exception) msg = f"None of {keys} found in {list(scan_parameters.keys())}." raise KeyError(msg) @@ -575,6 +580,7 @@ def fetch_and_convert( convert_to: Optional[type] = None, fallback: Optional[Any] = None, warn_typeerror: bool = True, + throw_exception: bool = False, ) -> Any: """Fetch a parameter from a scan params dictionary and convert it to a given type. @@ -595,6 +601,9 @@ def fetch_and_convert( warn_typeerror log a warning if value cannot be converted to ``convert_to`` type? + throw_exception + raise an error for empty string or NoneTypes? + Returns ------- value @@ -605,10 +614,12 @@ def fetch_and_convert( fallback_message = f"Falling back to {fallback} ({type(fallback)})." try: - raw_value = self.fetch(keys) + raw_value = self.fetch(keys, throw_exception=throw_exception) except KeyError: try: - raw_value, matched_keys = self.fetch(keys, match_case=True) + raw_value, matched_keys = self.fetch( + keys, match_case=True, throw_exception=throw_exception + ) except KeyError: WFLOGGER.warning( f"None of {keys} found in {list(self.params.keys())}. " @@ -622,7 +633,7 @@ def fetch_and_convert( if convert_to: try: value = convert_to(raw_value) - except TypeError: + except (TypeError, ValueError): if warn_typeerror: WFLOGGER.warning( f"Could not convert {value} to {convert_to}. 
{fallback_message}" @@ -820,10 +831,10 @@ def get_scan_params( ) ref_slice: Optional[int | str] = params.fetch_and_convert(["reference"], int, None) first_tr: Optional[int | str] = params.fetch_and_convert( - ["first_TR"], int, pipeconfig_start_indx + ["first_TR"], int, pipeconfig_start_indx, False ) last_tr: Optional[int | str] = params.fetch_and_convert( - ["last_TR"], int, pipeconfig_stop_indx + ["last_TR"], int, pipeconfig_stop_indx, False ) pe_direction: PE_DIRECTION = params.fetch_and_convert( ["PhaseEncodingDirection"], str, "" From a6e1ad7dba67ad7f4d92178b76b0d92389838080 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 12:59:12 -0400 Subject: [PATCH 044/507] :pencil2: Fix TR capitalization --- CPAC/utils/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 29201d779d..41937056c6 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -921,18 +921,18 @@ def get_scan_params( # if slice timing in ms convert tr to ms as well if tr and max_slice_offset > tr: WFLOGGER.warning( - "tr is in seconds and slice timings are in " - "milliseconds. Converting tr into milliseconds" + "TR is in seconds and slice timings are in " + "milliseconds. Converting TR into milliseconds" ) tr = tr * 1000 WFLOGGER.info("New tr value %s ms", tr) unit = "ms" elif tr and tr > 10: # noqa: PLR2004 - # check to see, if tr is in milliseconds, convert it into seconds - WFLOGGER.warning("tr is in milliseconds, Converting it into seconds") + # check to see, if TR is in milliseconds, convert it into seconds + WFLOGGER.warning("TR is in milliseconds, Converting it into seconds") tr = tr / 1000.0 - WFLOGGER.info("New tr value %s s", tr) + WFLOGGER.info("New TR value %s s", tr) unit = "s" # swap back in From f2a0ba9878f7eb667c2e073415ca3286d027e0c8 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 13:40:19 -0400 Subject: [PATCH 045/507] :bug: Use C-PAC Function node --- CPAC/alff/alff.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/CPAC/alff/alff.py b/CPAC/alff/alff.py index 4fe03cb2cc..f8bfc1a0b8 100644 --- a/CPAC/alff/alff.py +++ b/CPAC/alff/alff.py @@ -1,5 +1,20 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2012-2024 C-PAC Developers +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
 import os
 
 from nipype.interfaces.afni import preprocess
@@ -9,6 +24,7 @@
 from CPAC.pipeline import nipype_pipeline_engine as pe
 from CPAC.pipeline.nodeblock import nodeblock
 from CPAC.registration.registration import apply_transform
+from CPAC.utils.interfaces import Function
 from CPAC.utils.utils import check_prov_for_regtool
 
 
@@ -177,7 +193,7 @@ def create_alff(wf_name="alff_workflow"):
     wf.connect(input_node, "rest_res", bandpass, "in_file")
 
     get_option_string = pe.Node(
-        util.Function(
+        Function(
            input_names=["mask"],
             output_names=["option_string"],
             function=get_opt_string,

From 11ef28e0d156d408cf4f7933e23243d12ebcdc9e Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Mon, 8 Jul 2024 13:54:48 -0400
Subject: [PATCH 046/507] :package: Init `Function`

---
 CPAC/utils/interfaces/__init__.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/CPAC/utils/interfaces/__init__.py b/CPAC/utils/interfaces/__init__.py
index 126bb1c22b..6716a562f5 100644
--- a/CPAC/utils/interfaces/__init__.py
+++ b/CPAC/utils/interfaces/__init__.py
@@ -1,7 +1,27 @@
+# Copyright (C) 2010-2024 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Custom interfaces for C-PAC."""
+
 from .
import brickstat, datasink, function, pc +from .function import Function __all__ = [ "function", + "Function", "pc", "brickstat", "datasink", From f7c616fcf16d9a22295b954a93dc6726630a1891 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 14:16:53 -0400 Subject: [PATCH 047/507] :art: Remove unnecessary initializations [run reg-suite] --- CPAC/utils/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 41937056c6..511100a6b2 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -809,9 +809,7 @@ def get_scan_params( https://bids-specification.readthedocs.io/en/stable/glossary.html#effectiveechospacing-metadata """ # initialize vars to empty - tr = pattern = ref_slice = first_tr = last_tr = pe_direction = "" unit: Literal["ms", "s"] = "s" - effective_echo_spacing = template = None if isinstance(pipeconfig_stop_indx, str): if "End" in pipeconfig_stop_indx or "end" in pipeconfig_stop_indx: From f9e8af1aa801e7eb8566cc838bde3185980e5bfd Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 16:07:15 -0400 Subject: [PATCH 048/507] :recycle: Exclusively use custom `Function` Nodes + :rotating_light: Lint [run reg-suite] --- CPAC/anat_preproc/anat_preproc.py | 30 +++--- CPAC/anat_preproc/lesion_preproc.py | 28 ++++-- CPAC/anat_preproc/utils.py | 98 ++++++------------- .../distortion_correction.py | 14 +-- CPAC/distortion_correction/utils.py | 29 +++++- CPAC/easy_thresh/easy_thresh.py | 47 +++++---- CPAC/func_preproc/func_motion.py | 2 +- CPAC/func_preproc/func_preproc.py | 5 +- CPAC/group_analysis/group_analysis.py | 33 +++++-- .../longitudinal_preproc.py | 4 +- CPAC/median_angle/median_angle.py | 35 +++++-- CPAC/nuisance/nuisance.py | 13 +-- CPAC/nuisance/utils/utils.py | 2 +- CPAC/randomise/randomise.py | 10 +- CPAC/registration/output_func_to_standard.py | 6 +- CPAC/registration/registration.py | 57 +++++------ CPAC/reho/reho.py | 19 +++- CPAC/sca/sca.py | 20 ++-- CPAC/scrubbing/scrubbing.py | 49 ++++++---- CPAC/seg_preproc/seg_preproc.py | 38 ++++--- CPAC/surface/surf_preproc.py | 39 +++++--- CPAC/timeseries/timeseries_analysis.py | 8 +- CPAC/utils/interfaces/function/seg_preproc.py | 23 ++++- CPAC/utils/tests/test_datasource.py | 20 +++- CPAC/utils/utils.py | 10 +- 25 files changed, 385 insertions(+), 254 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 8e24b54b81..0f4e770f97 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
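These patches are mechanical: CPAC.utils.interfaces now re-exports Function, and each util.Function node becomes a C-PAC Function node with identical constructor arguments. The resulting form for the ALFF node changed above, as a sketch (the import of get_opt_string from CPAC.alff.utils is assumed):

    from CPAC.alff.utils import get_opt_string  # assumed import path
    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.utils.interfaces import Function  # was: nipype's util.Function

    # same call signature as util.Function, so the swap is one-for-one;
    # the C-PAC subclass additionally honors @Function.sig_imports
    get_option_string = pe.Node(
        Function(
            input_names=["mask"],
            output_names=["option_string"],
            function=get_opt_string,
        ),
        name="get_option_string",
    )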
-# from copy import deepcopy import os from nipype.interfaces import afni, ants, freesurfer, fsl @@ -36,6 +35,7 @@ ) from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge @@ -138,7 +138,7 @@ def acpc_alignment( aff_to_rig_imports = ["import os", "from numpy import *"] aff_to_rig = pe.Node( - util.Function( + Function( input_names=["in_xfm", "out_name"], output_names=["out_mat"], function=fsl_aff_to_rigid, @@ -319,7 +319,7 @@ def T1wmulT2w_brain_norm_s_string(sigma, in_file): return "-s %f -div %s" % (sigma, in_file) T1wmulT2w_brain_norm_s_string = pe.Node( - util.Function( + Function( input_names=["sigma", "in_file"], output_names=["out_str"], function=T1wmulT2w_brain_norm_s_string, @@ -378,7 +378,7 @@ def form_lower_string(mean, std): return "-thr %s -bin -ero -mul 255" % (lower) form_lower_string = pe.Node( - util.Function( + Function( input_names=["mean", "std"], output_names=["out_str"], function=form_lower_string, @@ -444,7 +444,7 @@ def file_to_a_list(infile_1, infile_2): return [infile_1, infile_2] file_to_a_list = pe.Node( - util.Function( + Function( input_names=["infile_1", "infile_2"], output_names=["out_list"], function=file_to_a_list, @@ -544,7 +544,7 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ) skullstrip_args = pe.Node( - util.Function( + Function( input_names=[ "spat_norm", "spat_norm_dxyz", @@ -762,7 +762,7 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): anat_robustfov.inputs.output_type = "NIFTI_GZ" anat_pad_RobustFOV_cropped = pe.Node( - util.Function( + Function( input_names=["cropped_image_path", "target_image_path"], output_names=["padded_image_path"], function=pad, @@ -902,7 +902,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): from CPAC.unet.function import predict_volumes unet_mask = pe.Node( - util.Function( + Function( input_names=["model_path", "cimg_in"], output_names=["out_path"], function=predict_volumes, @@ -1083,7 +1083,7 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # convert brain mask file from .mgz to .nii.gz fs_brain_mask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_brainmask_to_nifti_{pipe_num}", @@ -1119,7 +1119,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 """ wmparc_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file", "reslice_like", "args"], output_names=["out_file"], function=mri_convert, @@ -1130,7 +1130,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Register wmparc file if ingressing FreeSurfer data if strat_pool.check_rpool("pipeline-fs_xfm"): wmparc_to_native = pe.Node( - util.Function( + Function( input_names=["source_file", "target_file", "xfm", "out_file"], output_names=["transformed_file"], function=normalize_wmparc, @@ -1168,7 +1168,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") wb_command_fill_holes = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=wb_command ), name=f"wb_command_fill_holes_{pipe_num}", @@ -1206,7 +1206,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, 
opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz convert_fs_brainmask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_brainmask_to_nifti_{node_id}", @@ -1217,7 +1217,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz convert_fs_T1_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_T1_to_nifti_{node_id}", @@ -2888,7 +2888,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz normalize_head = pe.Node( - util.Function( + Function( input_names=["in_file", "number", "out_file_suffix"], output_names=["out_file"], function=fslmaths_command, diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 2ef58c3d2a..07871ae32d 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2019-2023 C-PAC Developers +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import afni import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def inverse_lesion(lesion_path): - """ + """Replace non-zeroes with zeroes and zeroes with ones. + Check if the image contains more zeros than non-zeros, if so, replaces non-zeros by zeros and zeros by ones. @@ -38,13 +55,12 @@ def inverse_lesion(lesion_path): nii = nu.inverse_nifti_values(image=lesion_path) nib.save(nii, lesion_out) return lesion_out - else: - return lesion_out + return lesion_out def create_lesion_preproc(wf_name="lesion_preproc"): - """ - The main purpose of this workflow is to process lesions masks. + """Process lesions masks. + Lesion mask file is deobliqued and reoriented in the same way as the T1 in the anat_preproc function. 
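The inversion that `inverse_lesion` delegates to `nu.inverse_nifti_values` in the hunk above is easy to state in plain numpy. A conceptual sketch of the check-and-swap on a toy array (the real helper in `CPAC.utils.nifti_utils` works on the full NIfTI image and preserves its header and affine):

    import numpy as np

    data = np.array([[0, 2, 0],
                     [0, 0, 5]])

    if np.count_nonzero(data) < data.size / 2:
        # more zeros than non-zeros: replace non-zeros with 0, zeros with 1
        inverted = np.where(data == 0, 1, 0)
    else:
        inverted = data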
@@ -95,7 +111,7 @@ def create_lesion_preproc(wf_name="lesion_preproc"): lesion_deoblique.inputs.deoblique = True lesion_inverted = pe.Node( - interface=util.Function( + interface=Function( input_names=["lesion_path"], output_names=["lesion_out"], function=inverse_lesion, diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py index b3246fc41a..39904bbb66 100644 --- a/CPAC/anat_preproc/utils.py +++ b/CPAC/anat_preproc/utils.py @@ -1,73 +1,34 @@ # -*- coding: utf-8 -*- -from numpy import zeros -from nibabel import load as nib_load, Nifti1Image -import nipype.interfaces.utility as util - -from CPAC.pipeline import nipype_pipeline_engine as pe - - -def get_shape(nifti_image): - return nib_load(nifti_image).shape - - -def pad(cropped_image_path, target_image_path): - """ - Pad a cropped image to match the dimensions of a target image along the z-axis, - while keeping padded image aligned with target_image. - - Parameters - ---------- - - cropped_image_path (str): The file path to the cropped image (NIfTI format). - - target_image_path (str): The file path to the target image (NIfTI format). - - Returns - ------- - - str: The file path to the saved padded image (NIfTI format). +# Copyright (C) 2018-2023 C-PAC Developers - The function loads cropped and target iamges, calculates the z-dimension shift required for alignment such - that the mask generated from padded image will work correctly on the target image. The result padded image is - saved as an NIfTI file in the working directory/node and file path is returned as output. +# This file is part of C-PAC. - Note: The function assumes that the input images are in NIfTI format and have compatible dimensions. The cropped - and target image should only differ in z-axis dimension. - """ - from os import getcwd, path - from typing import Optional +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. - from numpy import asanyarray, ndarray, zeros_like - from nibabel import load, Nifti1Image, save +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. - cropped_image: Optional[ndarray] = asanyarray(load(cropped_image_path).dataobj) - target_image: Optional[ndarray] = asanyarray(load(target_image_path).dataobj) +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+import os - # Taking 1 slice to calculate the z dimension shift from top - center_row: int = target_image.shape[0] // 2 - center_column: int = target_image.shape[1] // 2 - z_slice_cropped_image: Optional[ndarray] = cropped_image[ - center_row, center_column, : - ] - z_slice_target_image: Optional[ndarray] = target_image[center_row, center_column, :] - - for z_shift in range(len(z_slice_target_image) - len(z_slice_cropped_image) + 1): - if ( - z_slice_target_image[z_shift : z_shift + len(z_slice_cropped_image)] - == z_slice_cropped_image - ).all(): - break +from numpy import * +from nibabel import load as nib_load +from nipype.interfaces.base import CommandLineInputSpec, File, TraitedSpec +import nipype.interfaces.utility as util +from nipype.interfaces.workbench.base import WBCommand - padded_image_matrix: Optional[ndarray] = zeros_like(target_image) - padded_image_matrix[:, :, z_shift : cropped_image.shape[2] + z_shift] = ( - cropped_image - ) - padded_image_path: str = path.join(getcwd(), "padded_image_T1w.nii.gz") - cropped_image = load(cropped_image_path) - save( - Nifti1Image(padded_image_matrix, affine=cropped_image.affine), padded_image_path - ) - return padded_image_path +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def get_shape(nifti_image): + """Return the shape of a NIfTI image.""" return nib_load(nifti_image).shape @@ -286,7 +247,7 @@ def split_hemi(multi_file): def split_hemi_interface() -> util.Function: """Return a function interface for split_hemi.""" - return util.Function( + return Function( input_names=["multi_file"], output_names=["lh", "rh"], function=split_hemi ) @@ -587,12 +548,9 @@ def normalize_wmparc(source_file, target_file, xfm, out_file): return os.path.join(os.getcwd(), out_file) -"""This module provides interfaces for workbench -volume-remove-islands commands""" -from nipype.interfaces.base import CommandLineInputSpec, File, TraitedSpec -from nipype.interfaces.workbench.base import WBCommand - - class VolumeRemoveIslandsInputSpec(CommandLineInputSpec): + """InputSpec for workbench -volume-remove-islands commands.""" + in_file = File( exists=True, mandatory=True, @@ -610,14 +568,14 @@ class VolumeRemoveIslandsInputSpec(CommandLineInputSpec): class VolumeRemoveIslandsOutputSpec(TraitedSpec): + """OutputSpec for workbench -volume-remove-islands commands.""" + out_file = File(exists=True, desc="the output ROI volume") class VolumeRemoveIslands(WBCommand): - """ - workbench - -volume-remove-islands - REMOVE ISLANDS FROM AN ROI VOLUME + """Remove islandes from an ROI volume. + wb_command -volume-remove-islands - the input ROI volume - output - the output ROI volume. 
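The change this patch repeats across modules is mechanical: every node built on nipype's generic `util.Function` interface is rebuilt on C-PAC's own `Function` interface from `CPAC.utils.interfaces`, with all other arguments left as they were. A minimal sketch of the before/after shape (the helper `double_path` and the node name are illustrative, not taken from these diffs):

    import nipype.interfaces.utility as util

    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.utils.interfaces import Function


    def double_path(in_file):
        """Hypothetical helper standing in for any real C-PAC utility."""
        return in_file + in_file


    # Before: nipype's generic interface
    node = pe.Node(
        util.Function(
            input_names=["in_file"],
            output_names=["out_file"],
            function=double_path,
            imports=["import os"],
        ),
        name="example_node",
    )

    # After: the custom interface, a drop-in swap at these call sites
    node = pe.Node(
        Function(
            input_names=["in_file"],
            output_names=["out_file"],
            function=double_path,
            imports=["import os"],
        ),
        name="example_node",
    )

Because the constructor arguments are identical at these call sites, each hunk in this patch only swaps the class and, where needed, adds the `from CPAC.utils.interfaces import Function` import.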
diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index 79b8400bb1..91b379b0a7 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -131,7 +131,7 @@ def distcor_phasediff_fsl_fugue(wf, cfg, strat_pool, pipe_num, opt=None): == "AFNI" ): skullstrip_args = pe.Node( - util.Function( + Function( input_names=["shrink_fac"], output_names=["expr"], function=create_afni_arg, @@ -667,7 +667,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "import sys", ] phase_encoding = pe.Node( - util.Function( + Function( input_names=[ "unwarp_dir", "phase_one", @@ -710,7 +710,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): topup_imports = ["import os", "import subprocess"] run_topup = pe.Node( - util.Function( + Function( input_names=["merged_file", "acqparams"], output_names=[ "out_fieldcoef", @@ -732,7 +732,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(phase_encoding, "acq_params", run_topup, "acqparams") choose_phase = pe.Node( - util.Function( + Function( input_names=["phase_imgs", "unwarp_dir"], output_names=["out_phase_image", "vnum"], function=choose_phase_image, @@ -746,7 +746,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, choose_phase, "unwarp_dir") vnum_base = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", @@ -797,7 +797,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): name = "PhaseTwo_aw" vnum_base_two = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", @@ -840,7 +840,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): name = "PhaseOne_aw" vnum_base_one = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index 2b78dbfa4d..b76acba074 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -1,3 +1,19 @@ +# Copyright (C) 2021-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
import os import subprocess import sys @@ -12,6 +28,8 @@ import nipype.interfaces.utility as util from nipype.pipeline import engine as pe +from CPAC.utils.interfaces import Function + def run_HCP_gradient_unwarp(phase_vol, input_coeffs): import os @@ -49,7 +67,7 @@ def run_convertwarp(cw_trilinear, cw_fullWarp_abs): f"--warp1={cw_fullWarp_abs}", "--relout", f"--out={out_file}", - f"--j={jac_out}", + f"--j={out_jac}", ] subprocess.check_output(cmd) @@ -64,7 +82,7 @@ def gradient_distortion_correction(wf, inp_image, name): grad_unwarp_imports = ["import os", "import subprocess"] grad_unwarp = pe.Node( - util.Function( + Function( input_names=["phase_vol", "input_coeffs"], output_names=["trilinear", "abs_fullWarp"], function=run_HCP_gradient_unwarp, @@ -78,7 +96,7 @@ def gradient_distortion_correction(wf, inp_image, name): convertwarp_imports = ["import os", "import subprocess"] convert_warp = pe.Node( - util.Function( + Function( input_names=["cw_trilinear", "cw_fullWarp_abs"], output_names=["out_file_cw", "out_jac_cw"], function=run_convertwarp, @@ -248,8 +266,9 @@ def phase_encode( def z_pad(name="z_pad"): - """Pad in Z by one slice if odd so that topup does not complain - (slice consists of zeros that will be dilated by following step). + """Pad in Z by one slice if odd so that topup does not complain. + + (Slice consists of zeros that will be dilated by following step). """ wf = pe.Workflow(name=name) diff --git a/CPAC/easy_thresh/easy_thresh.py b/CPAC/easy_thresh/easy_thresh.py index d514d51c54..20918c08a9 100644 --- a/CPAC/easy_thresh/easy_thresh.py +++ b/CPAC/easy_thresh/easy_thresh.py @@ -1,3 +1,19 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import os import re import subprocess @@ -7,12 +23,11 @@ import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def easy_thresh(wf_name): - """ - Workflow for carrying out cluster-based thresholding - and colour activation overlaying. + """Carry out cluster-based thresholding and colour activation overlaying. 
Parameters ---------- @@ -213,7 +228,7 @@ def easy_thresh(wf_name): # or qform/sform info) from one image to another geo_imports = ["import subprocess"] copy_geometry = pe.MapNode( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=copy_geom, @@ -246,7 +261,7 @@ def easy_thresh(wf_name): cluster_imports = ["import os", "import re", "import subprocess"] cluster = pe.MapNode( - util.Function( + Function( input_names=[ "in_file", "volume", @@ -271,7 +286,7 @@ def easy_thresh(wf_name): # create tuple of z_threshold and max intensity value of threshold file create_tuple = pe.MapNode( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=get_tuple, @@ -299,7 +314,7 @@ def easy_thresh(wf_name): # as FSLDIR,MNI and voxel size get_bg_imports = ["import os", "import nibabel as nib"] get_backgroundimage = pe.MapNode( - util.Function( + Function( input_names=["in_file", "file_parameters"], output_names=["out_file"], function=get_standard_background_img, @@ -312,7 +327,7 @@ def easy_thresh(wf_name): # function node to get the standard fsl brain image # outputs single file get_backgroundimage2 = pe.Node( - util.Function( + Function( input_names=["in_file", "file_parameters"], output_names=["out_file"], function=get_standard_background_img, @@ -412,10 +427,9 @@ def call_cluster(in_file, volume, dlh, threshold, pthreshold, parameters): def copy_geom(infile_a, infile_b): - """ - Method to call fsl fslcpgeom command to copy - certain parts of the header information (image dimensions, - voxel dimensions, voxel dimensions units string, image + """Call fsl fslcpgeom command to copy certain parts of the header information. + + Copy (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) from one image to another. Parameters @@ -449,9 +463,7 @@ def copy_geom(infile_a, infile_b): def get_standard_background_img(in_file, file_parameters): - """ - Method to get the standard brain image from FSL - standard data directory. + """Get the standard brain image from FSL standard data directory. Parameters ---------- @@ -487,10 +499,7 @@ def get_standard_background_img(in_file, file_parameters): def get_tuple(infile_a, infile_b): - """ - Simple method to return tuple of z_threhsold - maximum intensity values of Zstatistic image - for input to the overlay. + """Return tuple of z_threhsold maximum intensity values of Zstatistic image for input to the overlay. 
Parameters ---------- diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 21fdb86a50..bea7d2e29c 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -423,7 +423,7 @@ def get_motion_ref(wf, cfg, strat_pool, pipe_num, opt=None): elif opt == "fmriprep_reference": func_get_RPI = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=estimate_reference_image, diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 4d0fe73c9e..7004b4f025 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -23,6 +23,7 @@ from CPAC.func_preproc.utils import nullify from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.ants import ( AI, # niworkflows PrintHeader, @@ -343,7 +344,7 @@ def create_wf_edit_func(wf_name="edit_func"): # allocate a node to check that the requested edits are # reasonable given the data func_get_idx = pe.Node( - util.Function( + Function( input_names=["in_files", "stop_idx", "start_idx"], output_names=["stopidx", "startidx"], function=get_idx, @@ -877,7 +878,7 @@ def form_thr_string(thr): return "-thr %s" % (threshold_z) form_thr_string = pe.Node( - util.Function( + Function( input_names=["thr"], output_names=["out_str"], function=form_thr_string, diff --git a/CPAC/group_analysis/group_analysis.py b/CPAC/group_analysis/group_analysis.py index d3e78c4698..6da81ff37e 100644 --- a/CPAC/group_analysis/group_analysis.py +++ b/CPAC/group_analysis/group_analysis.py @@ -1,14 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import fsl import nipype.interfaces.utility as util from CPAC.easy_thresh import easy_thresh from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def get_operation(in_file): - """ - Method to create operation string - for fslmaths. + """Create operation string for fslmaths. Parameters ---------- @@ -39,7 +54,9 @@ def get_operation(in_file): def label_zstat_files(zstat_list, con_file): - """Take in the z-stat file outputs of FSL FLAME and rename them after the + """Rename z-stat file outputs from FSL FLAME using contrast labels. + + Take in the z-stat file outputs of FSL FLAME and rename them after the contrast labels of the contrasts provided. """ cons = [] @@ -64,9 +81,7 @@ def label_zstat_files(zstat_list, con_file): def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): - """ - FSL `FEAT `_ - BASED Group Analysis. + """Run FSL `FEAT `_ BASED Group Analysis. 
Parameters ---------- @@ -313,7 +328,7 @@ def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): # easier interpretation label_zstat_imports = ["import os"] label_zstat = pe.Node( - util.Function( + Function( input_names=["zstat_list", "con_file"], output_names=["new_zstat_list"], function=label_zstat_files, @@ -341,7 +356,7 @@ def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): # function node to get the operation string for fslmaths command get_opstring = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=get_operation ), name="get_opstring", diff --git a/CPAC/longitudinal_pipeline/longitudinal_preproc.py b/CPAC/longitudinal_pipeline/longitudinal_preproc.py index dfead14d59..9fbe31c6b5 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_preproc.py +++ b/CPAC/longitudinal_pipeline/longitudinal_preproc.py @@ -24,9 +24,9 @@ import numpy as np import nibabel as nib from nipype.interfaces import fsl -import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import IFLOGGER from CPAC.utils.nifti_utils import nifti_image_input @@ -617,7 +617,7 @@ def subject_specific_template( ] if method == "flirt": template_gen_node = pe.Node( - util.Function( + Function( input_names=[ "input_brain_list", "input_skull_list", diff --git a/CPAC/median_angle/median_angle.py b/CPAC/median_angle/median_angle.py index 1433df8ac8..de4fd683cb 100644 --- a/CPAC/median_angle/median_angle.py +++ b/CPAC/median_angle/median_angle.py @@ -1,12 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def median_angle_correct(target_angle_deg, realigned_file): - """ - Performs median angle correction on fMRI data. Median angle correction algorithm - based on [1]_. + """Perform median angle correction on fMRI data. + + Median angle correction algorithm based on [1]_. Parameters ---------- @@ -89,8 +106,7 @@ def writeToFile(data, nii, fname): def calc_median_angle_params(subject): - """ - Calculates median angle parameters of a subject. + """Calculate median angle parameters of a subject. Parameters ---------- @@ -133,8 +149,7 @@ def calc_median_angle_params(subject): def calc_target_angle(mean_bolds, median_angles): """ - Calculates a target angle based on median angle parameters of - the group. + Calculate a target angle based on median angle parameters of the group. 
Parameters ---------- @@ -229,7 +244,7 @@ def create_median_angle_correction(name="median_angle_correction"): ) mac = pe.Node( - util.Function( + Function( input_names=["target_angle_deg", "realigned_file"], output_names=["corrected_file", "angles_file"], function=median_angle_correct, @@ -305,7 +320,7 @@ def create_target_angle(name="target_angle"): ) cmap = pe.MapNode( - util.Function( + Function( input_names=["subject"], output_names=["mean_bold", "median_angle"], function=calc_median_angle_params, @@ -315,7 +330,7 @@ def create_target_angle(name="target_angle"): ) cta = pe.Node( - util.Function( + Function( input_names=["mean_bolds", "median_angles"], output_names=["target_angle"], function=calc_target_angle, diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index c547ff6b01..45337a0c23 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -125,7 +125,7 @@ def form_mask_erosion_prop(erosion_prop): ] eroded_mask = pe.Node( - util.Function( + Function( input_names=[ "roi_mask", "skullstrip_mask", @@ -156,7 +156,7 @@ def form_mask_erosion_prop(erosion_prop): wf.connect(eroded_mask, "output_roi_mask", outputspec, "eroded_mask") if segmentmap: erosion_segmentmap = pe.Node( - util.Function( + Function( input_names=["roi_mask", "erosion_mm", "erosion_prop"], output_names=["eroded_roi_mask"], function=erosion, @@ -1357,7 +1357,7 @@ def create_regressor_workflow( ] cosfilter_node = pe.Node( - util.Function( + Function( input_names=["input_image_path", "timestep"], output_names=["cosfiltered_img"], function=cosine_filter, @@ -1374,7 +1374,7 @@ def create_regressor_workflow( "input_image_path", ) tr_string2float_node = pe.Node( - util.Function( + Function( input_names=["tr"], output_names=["tr_float"], function=TR_string_to_float, @@ -1887,7 +1887,7 @@ def filtering_bold_and_regressors( bandpass_ts.inputs.outputtype = "NIFTI_GZ" tr_string2float_node = pe.Node( - util.Function( + Function( input_names=["tr"], output_names=["tr_float"], function=TR_string_to_float, @@ -2418,7 +2418,8 @@ def nuisance_regressors_generation( opt: dict, space: Literal["T1w", "bold"], ) -> tuple[Workflow, dict]: - """ + """Generate nuisance regressors. + Parameters ---------- wf : ~nipype.pipeline.engine.workflows.Workflow diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py index 92499523a8..db6667dcb3 100644 --- a/CPAC/nuisance/utils/utils.py +++ b/CPAC/nuisance/utils/utils.py @@ -499,7 +499,7 @@ def generate_summarize_tissue_mask_ventricles_masking( # generate inverse transform flags, which depends on the number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, diff --git a/CPAC/randomise/randomise.py b/CPAC/randomise/randomise.py index 8c2351c9f0..b3144685aa 100644 --- a/CPAC/randomise/randomise.py +++ b/CPAC/randomise/randomise.py @@ -15,6 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import IFLOGGER @@ -53,7 +54,6 @@ def prep_randomise_workflow( ): from nipype.interfaces import fsl import nipype.interfaces.io as nio - import nipype.interfaces.utility as util wf = pe.Workflow(name="randomise_workflow") wf.base_dir = c.work_dir @@ -74,7 +74,7 @@ def prep_randomise_workflow( randomise.inputs.fcon = fts_file select_tcorrp_files = pe.Node( - util.Function( + Function( input_names=["input_list"], output_names=["out_file"], function=select ), name="select_t_corrp", @@ -83,7 +83,7 @@ def prep_randomise_workflow( wf.connect(randomise, "t_corrected_p_files", select_tcorrp_files, "input_list") select_tstat_files = pe.Node( - util.Function( + Function( input_names=["input_list"], output_names=["out_file"], function=select ), name="select_t_stat", @@ -147,6 +147,10 @@ def run(group_config_path): import os from CPAC.pipeline.cpac_group_runner import load_config_yml + from CPAC.pipeline.cpac_randomise_pipeline import ( + randomise_merged_file, + randomise_merged_mask, + ) group_config_obj = load_config_yml(group_config_path) pipeline_output_folder = group_config_obj.pipeline_dir diff --git a/CPAC/registration/output_func_to_standard.py b/CPAC/registration/output_func_to_standard.py index 6cf172f76d..bafea7d8d0 100644 --- a/CPAC/registration/output_func_to_standard.py +++ b/CPAC/registration/output_func_to_standard.py @@ -374,7 +374,7 @@ def ants_apply_warps_func_mni( itk_imports = ["import os"] change_transform = pe.Node( - util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -534,7 +534,7 @@ def ants_apply_warps_func_mni( # check transform list (if missing any init/rig/affine) and exclude Nonetype check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -546,7 +546,7 @@ def ants_apply_warps_func_mni( # generate inverse transform flags, which depends on the number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index b6cc9892ea..da63e694e4 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -39,6 +39,7 @@ seperate_warps_list, single_ants_xfm_to_list, ) +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool @@ -104,7 +105,7 @@ def apply_transform( wf.connect(inputNode, "reference", apply_warp, "reference_image") interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -118,7 +119,7 @@ def apply_transform( wf.connect(interp_string, "interpolation", apply_warp, "interpolation") ants_xfm_list = pe.Node( - util.Function( + Function( input_names=["transform"], output_names=["transform_list"], function=single_ants_xfm_to_list, @@ -135,7 +136,7 @@ def apply_transform( if int(num_cpus) > 1 and time_series: chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], 
function=chunk_ts, @@ -154,7 +155,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -196,7 +197,7 @@ def apply_transform( ) interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -222,7 +223,7 @@ def apply_transform( if int(num_cpus) > 1 and time_series: chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -241,7 +242,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -761,7 +762,7 @@ def create_register_func_to_anat( if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1067,7 +1068,7 @@ def bbreg_args(bbreg_target): if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1276,7 +1277,7 @@ def create_wf_calculate_ants_warp( """ reg_imports = ["import os", "import subprocess"] calculate_ants_warp = pe.Node( - interface=util.Function( + interface=Function( input_names=[ "moving_brain", "reference_brain", @@ -1302,7 +1303,7 @@ def create_wf_calculate_ants_warp( calculate_ants_warp.interface.num_threads = num_threads select_forward_initial = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1313,7 +1314,7 @@ def create_wf_calculate_ants_warp( select_forward_initial.inputs.selection = "Initial" select_forward_rigid = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1324,7 +1325,7 @@ def create_wf_calculate_ants_warp( select_forward_rigid.inputs.selection = "Rigid" select_forward_affine = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1335,7 +1336,7 @@ def create_wf_calculate_ants_warp( select_forward_affine.inputs.selection = "Affine" select_forward_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1346,7 +1347,7 @@ def create_wf_calculate_ants_warp( select_forward_warp.inputs.selection = "Warp" select_inverse_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1788,7 +1789,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1851,7 +1852,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], 
output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1873,7 +1874,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -1935,7 +1936,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -2004,7 +2005,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -2026,7 +2027,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -2122,7 +2123,7 @@ def bold_to_T1template_xfm_connector( itk_imports = ["import os"] change_transform = pe.Node( - util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -2964,7 +2965,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple split_combined_warp = pe.Node( - util.Function( + Function( input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, @@ -2982,7 +2983,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz split_combined_inv_warp = pe.Node( - util.Function( + Function( input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, @@ -3678,7 +3679,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -4819,7 +4820,7 @@ def single_step_resample_timeseries_to_T1template( reg_tool = check_prov_for_regtool(xfm_prov) bbr2itk = pe.Node( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4860,7 +4861,7 @@ def single_step_resample_timeseries_to_T1template( ### Loop starts! 
### motionxfm2itk = pe.MapNode( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4881,7 +4882,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": convert_transform = pe.Node( - util.Function( + Function( input_names=["one_d_filename"], output_names=["transform_directory"], function=one_d_to_mat, diff --git a/CPAC/reho/reho.py b/CPAC/reho/reho.py index 80e6599d10..870d3fa36d 100644 --- a/CPAC/reho/reho.py +++ b/CPAC/reho/reho.py @@ -1,9 +1,26 @@ # coding: utf-8 +# Copyright (C) 2012-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.reho.utils import * +from CPAC.utils.interfaces import Function def create_reho(wf_name): @@ -99,7 +116,7 @@ def create_reho(wf_name): "from CPAC.reho.utils import f_kendall", ] raw_reho_map = pe.Node( - util.Function( + Function( input_names=["in_file", "mask_file", "cluster_size"], output_names=["out_file"], function=compute_reho, diff --git a/CPAC/sca/sca.py b/CPAC/sca/sca.py index 8e714dbd5f..d12aae7de9 100644 --- a/CPAC/sca/sca.py +++ b/CPAC/sca/sca.py @@ -30,11 +30,15 @@ create_spatial_map_dataflow, resample_func_roi, ) +from CPAC.utils.interfaces import Function def create_sca(name_sca="sca"): """ - Map of the correlations of the Region of Interest(Seed in native or MNI space) with the rest of brain voxels. + Create map of the correlations of the Region of Interest with the rest of brain voxels. + + (Seed in native or MNI space) + The map is normalized to contain Z-scores, mapped in standard space and treated with spatial smoothing. Parameters @@ -150,8 +154,8 @@ def create_sca(name_sca="sca"): def create_temporal_reg(wflow_name="temporal_reg", which="SR"): - r""" - Temporal multiple regression workflow + r"""Create temporal multiple regression workflow. + Provides a spatial map of parameter estimates corresponding to each provided timeseries in a timeseries.txt file as regressors. 
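The temporal multiple regression described in this docstring reduces, per voxel, to an ordinary least-squares fit: the columns of the timeseries.txt file form the design matrix, and each row of the solution is one spatial map of parameter estimates. A toy illustration of that arithmetic only (random data and shapes are illustrative, not the nodes the workflow actually wires up):

    import numpy as np

    n_timepoints, n_regressors, n_voxels = 100, 3, 2
    rng = np.random.default_rng(0)
    X = rng.standard_normal((n_timepoints, n_regressors))  # timeseries.txt columns
    Y = rng.standard_normal((n_timepoints, n_voxels))      # voxel time courses

    # betas[i] is the parameter-estimate map for regressor i across voxels
    betas, *_ = np.linalg.lstsq(X, Y, rcond=None)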
@@ -280,9 +284,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): ) check_timeseries = pe.Node( - util.Function( - input_names=["in_file"], output_names=["out_file"], function=check_ts - ), + Function(input_names=["in_file"], output_names=["out_file"], function=check_ts), name="check_timeseries", ) @@ -325,7 +327,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): map_roi_imports = ['import os', 'import numpy as np'] # get roi order and send to output node for raw outputs - get_roi_order = pe.Node(util.Function(input_names=['maps', + get_roi_order = pe.Node(Function(input_names=['maps', 'timeseries'], output_names=['labels', 'maps'], @@ -350,7 +352,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): outputNode, 'temp_reg_map_files') # get roi order and send to output node for z-stat outputs - get_roi_order_zstat = pe.Node(util.Function(input_names=['maps', + get_roi_order_zstat = pe.Node(Function(input_names=['maps', 'timeseries'], output_names=['labels', 'maps'], @@ -396,7 +398,7 @@ def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None): # same workflow, except to run TSE and send it to the resource # pool so that it will not get sent to SCA resample_functional_roi_for_sca = pe.Node( - util.Function( + Function( input_names=["in_func", "in_roi", "realignment", "identity_matrix"], output_names=["out_func", "out_roi"], function=resample_func_roi, diff --git a/CPAC/scrubbing/scrubbing.py b/CPAC/scrubbing/scrubbing.py index ed85ef1024..e08b816edc 100644 --- a/CPAC/scrubbing/scrubbing.py +++ b/CPAC/scrubbing/scrubbing.py @@ -1,13 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def create_scrubbing_preproc(wf_name="scrubbing"): - """ - This workflow essentially takes the list of offending timepoints that are to be removed - and removes it from the motion corrected input image. Also, it removes the information - of discarded time points from the movement parameters file obtained during motion correction. + """Take the list of offending timepoints that are to be removed and remove it from the motion corrected input image. + + Also remove the information of discarded time points from the movement parameters file obtained during motion correction. 
Parameters ---------- @@ -94,7 +110,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): ) craft_scrub_input = pe.Node( - util.Function( + Function( input_names=["scrub_input", "frames_in_1D_file"], output_names=["scrub_input_string"], function=get_indx, @@ -103,7 +119,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): ) scrubbed_movement_parameters = pe.Node( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=get_mov_parameters, @@ -120,7 +136,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): # scrubbed_preprocessed.inputs.outputtype = 'NIFTI_GZ' scrubbed_preprocessed = pe.Node( - util.Function( + Function( input_names=["scrub_input"], output_names=["scrubbed_image"], function=scrub_image, @@ -152,9 +168,8 @@ def create_scrubbing_preproc(wf_name="scrubbing"): def get_mov_parameters(infile_a, infile_b): - """ - Method to get the new movement parameters - file after removing the offending time frames + """Get the new movement parameters file after removing the offending time frames. + (i.e., those exceeding FD 0.5mm/0.2mm threshold). Parameters @@ -192,7 +207,7 @@ def get_mov_parameters(infile_a, infile_b): raise Exception(msg) f = open(out_file, "a") - for l in l1: + for l in l1: # noqa: E741 data = l2[int(l.strip())] f.write(data) f.close() @@ -200,9 +215,7 @@ def get_mov_parameters(infile_a, infile_b): def get_indx(scrub_input, frames_in_1D_file): - """ - Method to get the list of time - frames that are to be included. + """Get the list of time frames that are to be included. Parameters ---------- @@ -230,10 +243,10 @@ def get_indx(scrub_input, frames_in_1D_file): def scrub_image(scrub_input): - """ - Method to run 3dcalc in order to scrub the image. This is used instead of - the Nipype interface for 3dcalc because functionality is needed for - specifying an input file with specifically-selected volumes. For example: + """Run 3dcalc in order to scrub the image. + + This is used instead of the Nipype interface for 3dcalc because functionality is + needed for specifying an input file with specifically-selected volumes. For example: input.nii.gz[2,3,4,..98], etc. Parameters diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index a66990f1e6..f769cf14b3 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -1,3 +1,19 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import ants, freesurfer, fsl, utility as util from nipype.interfaces.utility import Function @@ -23,10 +39,10 @@ def process_segment_map(wf_name, use_priors, use_custom_threshold, reg_tool): - """This is a sub workflow used inside segmentation workflow to process - probability maps obtained in segmentation. 
Steps include overlapping - of the prior tissue with probability maps, thresholding and binarizing - it and creating a mask that is used in further analysis. + """Create a sub workflow used inside segmentation workflow to process probability maps obtained in segmentation. + + Steps include overlapping of the prior tissue with probability maps, thresholding + and binarizing it and creating a mask that is used in further analysis. Parameters ---------- @@ -274,7 +290,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -289,7 +305,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -356,9 +372,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_based"): - """ - Generate the subject's cerebral spinal fluids, - white matter and gray matter mask based on provided template, if selected to do so. + """Generate the subject's cerebral spinal fluids, white matter and gray matter mask based on provided template, if selected to do so. Parameters ---------- @@ -417,7 +431,7 @@ def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_base ) seg_preproc_antsJointLabel = pe.Node( - util.Function( + Function( input_names=[ "anatomical_brain", "anatomical_brain_mask", @@ -700,7 +714,7 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): ) get_csf = pe.Node( - util.Function( + Function( input_names=["probability_maps"], output_names=["filename"], function=pick_wm_prob_0, @@ -945,7 +959,7 @@ def tissue_seg_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, fs_aseg_to_native, "target_file") fs_aseg_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_aseg_to_nifti_{pipe_num}", diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 2229e24b5a..1defe4e2d1 100644 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -1,10 +1,25 @@ -import os +# Copyright (C) 2021-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -import nipype.interfaces.utility as util +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+import os from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.surface.PostFreeSurfer.surf_reho import run_surf_reho +from CPAC.utils.interfaces import Function def run_surface( @@ -1026,7 +1041,7 @@ def run_surface( ) def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node( - util.Function( + Function( input_names=[ "post_freesurfer_folder", "freesurfer_folder", @@ -1369,7 +1384,7 @@ def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): ) def surface_falff(wf, cfg, strat_pool, pipe_num, opt): falff = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["surf_falff"], function=run_surf_falff, @@ -1394,7 +1409,7 @@ def surface_falff(wf, cfg, strat_pool, pipe_num, opt): ) def surface_alff(wf, cfg, strat_pool, pipe_num, opt): alff = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["surf_alff"], function=run_surf_alff, @@ -1427,7 +1442,7 @@ def surface_alff(wf, cfg, strat_pool, pipe_num, opt): ) def surface_reho(wf, cfg, strat_pool, pipe_num, opt): L_cortex_file = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "structure", "cortex_filename"], output_names=["L_cortex_file"], function=run_get_cortex, @@ -1442,7 +1457,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, L_cortex_file, "dtseries") R_cortex_file = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "structure", "cortex_filename"], output_names=["R_cortex_file"], function=run_get_cortex, @@ -1456,7 +1471,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, R_cortex_file, "dtseries") mean_timeseries = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["mean_timeseries"], function=run_mean_timeseries, @@ -1468,7 +1483,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, mean_timeseries, "dtseries") L_reho = pe.Node( - util.Function( + Function( input_names=[ "subject", "dtseries", @@ -1499,7 +1514,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, L_reho, "dtseries") R_reho = pe.Node( - util.Function( + Function( input_names=[ "subject", "dtseries", @@ -1545,7 +1560,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): ) def surface_connectivity_matrix(wf, cfg, strat_pool, pipe_num, opt): connectivity_parcellation = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "surf_atlaslabel"], output_names=["parcellation_file"], function=run_ciftiparcellate, @@ -1561,7 +1576,7 @@ def surface_connectivity_matrix(wf, cfg, strat_pool, pipe_num, opt): ]["surface_parcellation_template"] correlation_matrix = pe.Node( - util.Function( + Function( input_names=["subject", "ptseries"], output_names=["correlation_matrix"], function=run_cifticorrelation, diff --git a/CPAC/timeseries/timeseries_analysis.py b/CPAC/timeseries/timeseries_analysis.py index 14547bc79b..a56bc33c74 100644 --- a/CPAC/timeseries/timeseries_analysis.py +++ b/CPAC/timeseries/timeseries_analysis.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
from nipype.interfaces import afni, fsl, utility as util -from nipype.interfaces.utility import Function from CPAC.connectome.connectivity_matrix import ( create_connectome_afni, @@ -29,6 +28,7 @@ create_spatial_map_dataflow, resample_func_roi, ) +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import FMLOGGER @@ -86,7 +86,7 @@ def get_voxel_timeseries(wf_name: str = "voxel_timeseries") -> pe.Workflow: ) timeseries_voxel = pe.Node( - util.Function( + Function( input_names=["data_file", "template"], output_names=["oneD_file"], function=gen_voxel_timeseries, @@ -241,7 +241,7 @@ def get_roi_timeseries(wf_name: str = "roi_timeseries") -> pe.Workflow: clean_csv_imports = ["import os"] clean_csv = pe.Node( - util.Function( + Function( input_names=["roi_csv"], output_names=["roi_array", "edited_roi_csv"], function=clean_roi_csv, @@ -382,7 +382,7 @@ def get_vertices_timeseries(wf_name="vertices_timeseries"): ) timeseries_surface = pe.Node( - util.Function( + Function( input_names=["rh_surface_file", "lh_surface_file"], output_names=["out_file"], function=gen_vertices_timeseries, diff --git a/CPAC/utils/interfaces/function/seg_preproc.py b/CPAC/utils/interfaces/function/seg_preproc.py index d220781f48..5fe9152b23 100644 --- a/CPAC/utils/interfaces/function/seg_preproc.py +++ b/CPAC/utils/interfaces/function/seg_preproc.py @@ -1,11 +1,26 @@ +# Copyright (C) 2022-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Function interfaces for seg_preproc.""" -from nipype.interfaces import utility as util +from CPAC.utils.interfaces import Function def pick_tissue_from_labels_file_interface(input_names=None): - """Function to create a Function interface for - CPAC.seg_preproc.utils.pick_tissue_from_labels_file. + """Create a Function interface for ~CPAC.seg_preproc.utils.pick_tissue_from_labels_file. Parameters ---------- @@ -20,7 +35,7 @@ def pick_tissue_from_labels_file_interface(input_names=None): if input_names is None: input_names = ["multiatlas_Labels", "csf_label", "gm_label", "wm_label"] - return util.Function( + return Function( input_names=input_names, output_names=["csf_mask", "gm_mask", "wm_mask"], function=pick_tissue_from_labels_file, diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 9842310bb1..be7c2255c2 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -1,10 +1,26 @@ +# Copyright (C) 2019-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import json import pytest -import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import match_epi_fmaps +from CPAC.utils.interfaces import Function from CPAC.utils.test_resources import setup_test_wf @@ -48,7 +64,7 @@ def test_match_epi_fmaps(): } match_fmaps = pe.Node( - util.Function( + Function( input_names=["fmap_dct", "bold_pedir"], output_names=["opposite_pe_epi", "same_pe_epi"], function=match_epi_fmaps, diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 511100a6b2..beaac216bb 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -132,7 +132,7 @@ def get_flag_wf(wf_name="get_flag"): input_node = pe.Node(util.IdentityInterface(fields=["in_flag"]), name="inputspec") get_flag = pe.Node( - util.Function(input_names=["in_flag"], function=_get_flag), name="get_flag" + Function(input_names=["in_flag"], function=_get_flag), name="get_flag" ) wf.connect(input_node, "in_flag", get_flag, "in_flag") @@ -322,7 +322,7 @@ def get_zscore(map_node=False, wf_name="z_score"): ) op_string = pe.MapNode( - util.Function( + Function( input_names=["mean", "std_dev"], output_names=["op_string"], function=get_operand_string, @@ -345,7 +345,7 @@ def get_zscore(map_node=False, wf_name="z_score"): ) op_string = pe.Node( - util.Function( + Function( input_names=["mean", "std_dev"], output_names=["op_string"], function=get_operand_string, @@ -400,7 +400,7 @@ def get_fisher_zscore(input_name, map_node=False, wf_name="fisher_z_score"): if map_node: # node to separate out fisher_z_score = pe.MapNode( - util.Function( + Function( input_names=["correlation_file", "timeseries_one_d", "input_name"], output_names=["out_file"], function=compute_fisher_z_score, @@ -410,7 +410,7 @@ def get_fisher_zscore(input_name, map_node=False, wf_name="fisher_z_score"): ) else: fisher_z_score = pe.Node( - util.Function( + Function( input_names=["correlation_file", "timeseries_one_d", "input_name"], output_names=["out_file"], function=compute_fisher_z_score, From fbe18afa3467fe0b57901b33c16061760f3aaefa Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 17:01:06 -0400 Subject: [PATCH 049/507] :bug: Fix circular import [run reg-suite] --- CPAC/utils/interfaces/function/seg_preproc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/interfaces/function/seg_preproc.py b/CPAC/utils/interfaces/function/seg_preproc.py index 5fe9152b23..f3cb06840b 100644 --- a/CPAC/utils/interfaces/function/seg_preproc.py +++ b/CPAC/utils/interfaces/function/seg_preproc.py @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
"""Function interfaces for seg_preproc.""" -from CPAC.utils.interfaces import Function +from CPAC.utils.interfaces.function.function import Function def pick_tissue_from_labels_file_interface(input_names=None): From 09a39f5e1308fc0039d14680dd3b9bf834046062 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 19 Jul 2024 01:41:24 -0400 Subject: [PATCH 050/507] :children_crossing: Disable echo of each log statement Co-authored-by: Elizabeth Kenneally <113037677+e-kenneally@users.noreply.github.com> --- CPAC/__main__.py | 2 -- CPAC/cwas/tests/test_cwas.py | 3 --- CPAC/cwas/tests/test_pipeline_cwas.py | 2 -- CPAC/image_utils/tests/test_smooth.py | 2 -- CPAC/nuisance/tests/test_utils.py | 2 -- CPAC/pipeline/test/test_cpac_group_runner.py | 3 +-- CPAC/utils/build_data_config.py | 2 -- CPAC/utils/monitoring/custom_logging.py | 9 ++++++++- CPAC/utils/ndmg_utils.py | 2 -- CPAC/utils/tests/test_bids_utils.py | 2 -- CPAC/utils/tests/test_symlinks.py | 2 -- CPAC/vmhc/tests/test_vmhc.py | 2 -- 12 files changed, 9 insertions(+), 24 deletions(-) diff --git a/CPAC/__main__.py b/CPAC/__main__.py index 90eb435b23..0b088c67f2 100644 --- a/CPAC/__main__.py +++ b/CPAC/__main__.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -from logging import basicConfig, INFO import os import click @@ -26,7 +25,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC") -basicConfig(format="%(message)s", level=INFO) # CLI tree # diff --git a/CPAC/cwas/tests/test_cwas.py b/CPAC/cwas/tests/test_cwas.py index 974fd83513..72abfc4d5a 100755 --- a/CPAC/cwas/tests/test_cwas.py +++ b/CPAC/cwas/tests/test_cwas.py @@ -16,8 +16,6 @@ # License along with C-PAC. If not, see . """Test the CWAS pipeline.""" -from logging import basicConfig, INFO - import pytest import nibabel as nib @@ -25,7 +23,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.cwas.tests") -basicConfig(format="%(message)s", level=INFO) @pytest.mark.skip(reason="requires RegressionTester") diff --git a/CPAC/cwas/tests/test_pipeline_cwas.py b/CPAC/cwas/tests/test_pipeline_cwas.py index 866318821a..f910419d2c 100644 --- a/CPAC/cwas/tests/test_pipeline_cwas.py +++ b/CPAC/cwas/tests/test_pipeline_cwas.py @@ -16,7 +16,6 @@ # License along with C-PAC. If not, see . """Test the CWAS pipeline.""" -from logging import basicConfig, INFO import os from urllib.error import URLError @@ -30,7 +29,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.cwas.tests") -basicConfig(format="%(message)s", level=INFO) @pytest.mark.parametrize("z_score", [[0], [1], [0, 1], []]) diff --git a/CPAC/image_utils/tests/test_smooth.py b/CPAC/image_utils/tests/test_smooth.py index d1f8a8ec98..bf1c79fd94 100644 --- a/CPAC/image_utils/tests/test_smooth.py +++ b/CPAC/image_utils/tests/test_smooth.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-from logging import basicConfig, INFO import os import pytest @@ -26,7 +25,6 @@ from CPAC.utils.test_mocks import configuration_strategy_mock logger = getLogger("CPAC.image_utils.tests") -basicConfig(format="%(message)s", level=INFO) @pytest.mark.skip(reason="needs refactoring") diff --git a/CPAC/nuisance/tests/test_utils.py b/CPAC/nuisance/tests/test_utils.py index 724d536b63..be0ea03e96 100644 --- a/CPAC/nuisance/tests/test_utils.py +++ b/CPAC/nuisance/tests/test_utils.py @@ -1,4 +1,3 @@ -from logging import basicConfig, INFO import os import tempfile @@ -10,7 +9,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.nuisance.tests") -basicConfig(format="%(message)s", level=INFO) mocked_outputs = p.resource_filename( "CPAC", os.path.join("nuisance", "tests", "motion_statistics") diff --git a/CPAC/pipeline/test/test_cpac_group_runner.py b/CPAC/pipeline/test/test_cpac_group_runner.py index d8a218ca19..6c20341ede 100644 --- a/CPAC/pipeline/test/test_cpac_group_runner.py +++ b/CPAC/pipeline/test/test_cpac_group_runner.py @@ -14,12 +14,11 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -from logging import basicConfig, INFO + from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.pipeline.test") -basicConfig(format="%(message)s", level=INFO) def run_gather_outputs_func(pipeline_out_dir): diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py index 8be6c6b234..e17cf2ed30 100644 --- a/CPAC/utils/build_data_config.py +++ b/CPAC/utils/build_data_config.py @@ -16,14 +16,12 @@ # License along with C-PAC. If not, see . """Build a C-PAC data configuration.""" -from logging import basicConfig, INFO from pathlib import Path from typing import Any from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.utils.data-config") -basicConfig(format="%(message)s", level=INFO) def _cannot_write(file_name: Path | str) -> None: diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py index abd6b63438..bced6e99d4 100644 --- a/CPAC/utils/monitoring/custom_logging.py +++ b/CPAC/utils/monitoring/custom_logging.py @@ -59,7 +59,14 @@ def getLogger(name): # pylint: disable=invalid-name if name in MOCK_LOGGERS: return MOCK_LOGGERS[name] logger = nipype_logging.getLogger(name) - return logging.getLogger(name) if logger is None else logger + if logger is None: + logger = logging.getLogger(name) + if not logger.handlers: + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(message)s")) + logger.setLevel(logging.INFO) + logger.addHandler(handler) + return logger # Nipype built-in loggers diff --git a/CPAC/utils/ndmg_utils.py b/CPAC/utils/ndmg_utils.py index 0623118e75..1680e8edf6 100644 --- a/CPAC/utils/ndmg_utils.py +++ b/CPAC/utils/ndmg_utils.py @@ -32,7 +32,6 @@ # Modifications Copyright (C) 2022-2024 C-PAC Developers # This file is part of C-PAC. -from logging import basicConfig, INFO import os import numpy as np @@ -41,7 +40,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("nuerodata.m2g.ndmg") -basicConfig(format="%(message)s", level=INFO) def ndmg_roi_timeseries(func_file, label_file): diff --git a/CPAC/utils/tests/test_bids_utils.py b/CPAC/utils/tests/test_bids_utils.py index 57c0abef56..00355f1201 100644 --- a/CPAC/utils/tests/test_bids_utils.py +++ b/CPAC/utils/tests/test_bids_utils.py @@ -16,7 +16,6 @@ # License along with C-PAC. 
If not, see . """Tests for bids_utils.""" -from logging import basicConfig, INFO import os from subprocess import run @@ -34,7 +33,6 @@ from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.utils.tests") -basicConfig(format="%(message)s", level=INFO) def create_sample_bids_structure(root_dir): diff --git a/CPAC/utils/tests/test_symlinks.py b/CPAC/utils/tests/test_symlinks.py index 570d2e9b74..a2ddca02c9 100644 --- a/CPAC/utils/tests/test_symlinks.py +++ b/CPAC/utils/tests/test_symlinks.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -from logging import basicConfig, INFO import os import tempfile @@ -24,7 +23,6 @@ from CPAC.utils.symlinks import create_symlinks logger = getLogger("CPAC.utils.tests") -basicConfig(format="%(message)s", level=INFO) mocked_outputs = p.resource_filename( "CPAC", os.path.join("utils", "tests", "test_symlinks-outputs.txt") diff --git a/CPAC/vmhc/tests/test_vmhc.py b/CPAC/vmhc/tests/test_vmhc.py index 2471a9b02c..e66d3cd782 100644 --- a/CPAC/vmhc/tests/test_vmhc.py +++ b/CPAC/vmhc/tests/test_vmhc.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -from logging import basicConfig, INFO import os import pytest @@ -25,7 +24,6 @@ from CPAC.vmhc.vmhc import vmhc as create_vmhc logger = getLogger("CPAC.utils.tests") -basicConfig(format="%(message)s", level=INFO) @pytest.mark.skip(reason="test needs refactoring") From ebdd807c06934cf2edd50409031670e8eebead87 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 26 Jul 2024 11:25:39 -0400 Subject: [PATCH 051/507] :white_check_mark: Don't assume first handler is FileHandler --- CPAC/pipeline/check_outputs.py | 8 ++++++-- CPAC/utils/monitoring/custom_logging.py | 19 ++++++++++++++++--- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/CPAC/pipeline/check_outputs.py b/CPAC/pipeline/check_outputs.py index 2e55ef560d..7db2349337 100644 --- a/CPAC/pipeline/check_outputs.py +++ b/CPAC/pipeline/check_outputs.py @@ -59,7 +59,11 @@ def check_outputs(output_dir: str, log_dir: str, pipe_name: str, unique_id: str) if isinstance(outputs_logger, (Logger, MockLogger)) and len( outputs_logger.handlers ): - outputs_log = getattr(outputs_logger.handlers[0], "baseFilename", None) + outputs_log = getattr( + MockLogger._get_first_file_handler(outputs_logger.handlers), + "baseFilename", + None, + ) else: outputs_log = None if outputs_log is None: @@ -103,7 +107,7 @@ def check_outputs(output_dir: str, log_dir: str, pipe_name: str, unique_id: str) try: log_note = ( "Missing outputs have been logged in " - f"{missing_log.handlers[0].baseFilename}" + f"{MockLogger._get_first_file_handler(missing_log.handlers).baseFilename}" ) except (AttributeError, IndexError): log_note = "" diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py index bced6e99d4..c3e5aa972d 100644 --- a/CPAC/utils/monitoring/custom_logging.py +++ b/CPAC/utils/monitoring/custom_logging.py @@ -21,6 +21,7 @@ import subprocess from sys import exc_info as sys_exc_info from traceback import print_exception +from typing import Optional, Sequence from nipype import logging as nipype_logging @@ -178,7 +179,9 @@ def _log(message, *items, exc_info=False): logging, level.upper(), logging.NOTSET ): with open( - self.handlers[0].baseFilename, "a", encoding="utf-8" + MockLogger._get_first_file_handler(self.handlers).baseFilename, + "a", + encoding="utf-8", 
) as log_file: if exc_info and isinstance(message, Exception): value, traceback = sys_exc_info()[1:] @@ -197,6 +200,16 @@ def delete(self): """Delete the mock logger from memory.""" del MOCK_LOGGERS[self.name] + @staticmethod + def _get_first_file_handler( + handlers: Sequence[logging.Handler | MockHandler], + ) -> Optional[logging.FileHandler]: + """Given a list of Handlers, return the first FileHandler found or return None.""" + for handler in handlers: + if isinstance(handler, logging.FileHandler): + return handler + return None + def _lazy_sub(message, *items): """Given lazy-logging syntax, return string with substitutions. @@ -259,12 +272,12 @@ def set_up_logger( Examples -------- >>> lg = set_up_logger('test') - >>> lg.handlers[0].baseFilename.split('/')[-1] + >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1] 'test.log' >>> lg.level 0 >>> lg = set_up_logger('second_test', 'specific_filename.custom', 'debug') - >>> lg.handlers[0].baseFilename.split('/')[-1] + >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1] 'specific_filename.custom' >>> lg.level 10 From e6cfc58bfdfaa6264bb761de06e6451874b04ae6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 26 Jul 2024 14:20:20 -0400 Subject: [PATCH 052/507] :necktie: Count MockHandler as FileHandler --- CPAC/utils/monitoring/custom_logging.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py index c3e5aa972d..3d8d1b842a 100644 --- a/CPAC/utils/monitoring/custom_logging.py +++ b/CPAC/utils/monitoring/custom_logging.py @@ -203,10 +203,10 @@ def delete(self): @staticmethod def _get_first_file_handler( handlers: Sequence[logging.Handler | MockHandler], - ) -> Optional[logging.FileHandler]: + ) -> Optional[logging.FileHandler | MockHandler]: """Given a list of Handlers, return the first FileHandler found or return None.""" for handler in handlers: - if isinstance(handler, logging.FileHandler): + if isinstance(handler, (logging.FileHandler, MockHandler)): return handler return None From d942e258bf08a157c1c09ffd9d10b46d817b5a10 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 26 Aug 2024 14:26:04 -0400 Subject: [PATCH 053/507] :construction_worker: Skip local scripts for CI pre-config --- .pre-commit-config.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 957e36b029..f22989f84d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,6 +14,9 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +ci: + skip: [autoversioning, update-yaml-comments] + fail_fast: false repos: From 0f36032fd75b3030e428a0fd0359fc5e1f83c57f Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 26 Aug 2024 16:33:13 -0400 Subject: [PATCH 054/507] :construction_worker: Reenable autoversioning; disable ruff ref https://github.com/FCP-INDI/C-PAC/pull/2058#technical_details --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f22989f84d..66b0a5da0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
ci: - skip: [autoversioning, update-yaml-comments] + skip: [ruff, update-yaml-comments] fail_fast: false From b5101669a08eb8048a91a79d55b987c4a91adb93 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 20:33:32 +0000 Subject: [PATCH 055/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/cpac_pipeline.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 40811b9e77..26f67c970f 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -710,21 +710,24 @@ def run_workflow( ] timeHeader = dict(zip(gpaTimeFields, gpaTimeFields)) - with open( - os.path.join( - c.pipeline_setup["log_directory"]["path"], - "cpac_individual_timing" - f"_{c.pipeline_setup['pipeline_name']}.csv", - ), - "a", - ) as timeCSV, open( - os.path.join( - c.pipeline_setup["log_directory"]["path"], - "cpac_individual_timing_%s.csv" - % c.pipeline_setup["pipeline_name"], - ), - "r", - ) as readTimeCSV: + with ( + open( + os.path.join( + c.pipeline_setup["log_directory"]["path"], + "cpac_individual_timing" + f"_{c.pipeline_setup['pipeline_name']}.csv", + ), + "a", + ) as timeCSV, + open( + os.path.join( + c.pipeline_setup["log_directory"]["path"], + "cpac_individual_timing_%s.csv" + % c.pipeline_setup["pipeline_name"], + ), + "r", + ) as readTimeCSV, + ): timeWriter = csv.DictWriter(timeCSV, fieldnames=gpaTimeFields) timeReader = csv.DictReader(readTimeCSV) From f92fde14ba5a6c37247903f70e84979a276ffb4a Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 26 Aug 2024 16:41:27 -0400 Subject: [PATCH 056/507] :memo: Add pre-commit.ci badge to README --- CHANGELOG.md | 1 + README.md | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index df8f40a666..781be04ee6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - `pyproject.toml` file with `[build-system]` defined. +- [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). ### Changed diff --git a/README.md b/README.md index 137bc57972..c320755101 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,8 @@ C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANT You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . 
--> C-PAC: Configurable Pipeline for the Analysis of Connectomes ============================================================ -[![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) +[![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) + [![LGPL](https://www.gnu.org/graphics/lgplv3-88x31.png)](./COPYING.LESSER) From 4faf30dc209599c3d082bcb1c6b7f0e749d42e94 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 31 Jul 2024 11:42:52 -0400 Subject: [PATCH 057/507] :alembic: Test run [run reg-suite] From 822bd4820c3f69eb4313b06ee6547120c059c9f5 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 4 Sep 2024 14:16:34 -0400 Subject: [PATCH 058/507] :memo: Update `FROM` base from `default` to `blank` --- CPAC/utils/configuration/configuration.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index 8444cce105..c4542f579d 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -50,7 +50,10 @@ class Configuration: will form the base of the Configuration object with the values in the given dictionary overriding matching keys in the base at any depth. If no ``FROM`` key is included, the base Configuration is - the default Configuration. + the blank preconfiguration. + + .. versionchanged:: 1.8.5 + From version 1.8.0 to version 1.8.5, unspecified keys were based on the default configuration rather than the blank preconfiguration. ``FROM`` accepts either the name of a preconfigured pipleine or a path to a YAML file. From b896b75e64e1e8b57f35fe7c4fa0985e3a6d52be Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 3 Sep 2024 11:57:17 -0400 Subject: [PATCH 059/507] :children_crossing: Coerce ints in data config sub- and ses- values to strings --- CPAC/utils/bids_utils.py | 54 ++++++++++++++++++++---- CPAC/utils/tests/configs/__init__.py | 12 +++--- CPAC/utils/tests/configs/github_2144.yml | 4 ++ CPAC/utils/tests/test_bids_utils.py | 18 ++++++++ 4 files changed, 75 insertions(+), 13 deletions(-) create mode 100644 CPAC/utils/tests/configs/github_2144.yml diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py index 34e72d430e..9840badb23 100755 --- a/CPAC/utils/bids_utils.py +++ b/CPAC/utils/bids_utils.py @@ -14,10 +14,13 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
+from base64 import b64decode +from collections.abc import Iterable import json import os import re import sys +from typing import Any, Callable from warnings import warn from botocore.exceptions import BotoCoreError @@ -26,6 +29,16 @@ from CPAC.utils.monitoring import UTLOGGER +class SpecifiedBotoCoreError(BotoCoreError): + """Specified :py:class:`~botocore.exceptions.BotoCoreError`.""" + + def __init__(self, msg: str, *args, **kwargs) -> None: + """Initialize BotoCoreError with message.""" + msg = msg.format(**kwargs) + Exception.__init__(self, msg) + self.kwargs = kwargs + + def bids_decode_fname(file_path, dbg=False, raise_error=True): f_dict = {} @@ -842,7 +855,7 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""): f"Error retrieving {s3_obj.key.replace(prefix, '')}" f" ({e.message})" ) - raise BotoCoreError(msg) from e + raise SpecifiedBotoCoreError(msg) from e elif "nii" in str(s3_obj.key): file_paths.append( str(s3_obj.key).replace(prefix, "").lstrip("/") @@ -868,9 +881,15 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""): ): json.load(open(os.path.join(root, f), "r")) } ) - except UnicodeDecodeError: + except UnicodeDecodeError as unicode_decode_error: msg = f"Could not decode {os.path.join(root, f)}" - raise UnicodeDecodeError(msg) + raise UnicodeDecodeError( + unicode_decode_error.encoding, + unicode_decode_error.object, + unicode_decode_error.start, + unicode_decode_error.end, + msg, + ) if not file_paths and not config_dict: msg = ( @@ -983,15 +1002,17 @@ def insert_entity(resource, key, value): return "_".join([*new_entities[0], f"{key}-{value}", *new_entities[1], suff]) -def load_yaml_config(config_filename, aws_input_creds): +def load_yaml_config(config_filename: str, aws_input_creds, safe_load: bool = False): if config_filename.lower().startswith("data:"): try: header, encoded = config_filename.split(",", 1) config_content = b64decode(encoded) - return yaml.safe_load(config_content) + if safe_load: + return yaml.safe_load(config_content) + return yaml.load(config_content, Loader=yaml.loader.BaseLoader) except: msg = f"Error! Could not find load config from data URI {config_filename}" - raise BotoCoreError(msg) + raise SpecifiedBotoCoreError(msg=msg) if config_filename.lower().startswith("s3://"): # s3 paths begin with s3://bucket/ @@ -1013,7 +1034,9 @@ def load_yaml_config(config_filename, aws_input_creds): config_filename = os.path.realpath(config_filename) try: - return yaml.safe_load(open(config_filename, "r")) + if safe_load: + return yaml.safe_load(open(config_filename, "r")) + return yaml.load(open(config_filename, "r"), Loader=yaml.loader.BaseLoader) except IOError: msg = f"Error! Could not find config file {config_filename}" raise FileNotFoundError(msg) @@ -1110,6 +1133,18 @@ def create_cpac_data_config( return sub_list +def _check_value_type( + sub_list: list[dict[str, Any]], + keys: list[str] = ["subject_id", "unique_id"], + value_type: type = int, + any_or_all: Callable[[Iterable], bool] = any, +) -> bool: + """Check if any or all of a key in a sub_list is of a given type.""" + return any_or_all( + isinstance(sub.get(key), value_type) for key in keys for sub in sub_list + ) + + def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds): """ Loads the file as a check to make sure it is available and readable. 
@@ -1127,7 +1162,10 @@ def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds) ------- list """ - sub_list = load_yaml_config(data_config_file, aws_input_creds) + sub_list: list[dict[str, str]] = load_yaml_config(data_config_file, aws_input_creds) + + if _check_value_type(sub_list, ["subject_id", "unique_id"], int, any): + sub_list = load_yaml_config(data_config_file, aws_input_creds, safe_load=False) if participant_labels: sub_list = [ diff --git a/CPAC/utils/tests/configs/__init__.py b/CPAC/utils/tests/configs/__init__.py index f8a23bd4e6..a02311ad9e 100644 --- a/CPAC/utils/tests/configs/__init__.py +++ b/CPAC/utils/tests/configs/__init__.py @@ -1,15 +1,17 @@ """Configs for testing.""" -from pathlib import Path +from importlib import resources +from importlib.resources.abc import Traversable -from pkg_resources import resource_filename import yaml -_TEST_CONFIGS_PATH = Path(resource_filename("CPAC", "utils/tests/configs")) -with open(_TEST_CONFIGS_PATH / "neurostars_23786.yml", "r", encoding="utf-8") as _f: +_TEST_CONFIGS_PATH: Traversable = resources.files("CPAC").joinpath( + "utils/tests/configs" +) +with (_TEST_CONFIGS_PATH / "neurostars_23786.yml").open("r", encoding="utf-8") as _f: # A loaded YAML file to test https://tinyurl.com/neurostars23786 NEUROSTARS_23786 = _f.read() -with open(_TEST_CONFIGS_PATH / "neurostars_24035.yml", "r", encoding="utf-8") as _f: +with (_TEST_CONFIGS_PATH / "neurostars_24035.yml").open("r", encoding="utf-8") as _f: # A loaded YAML file to test https://tinyurl.com/neurostars24035 NEUROSTARS_24035 = _f.read() # A loaded YAML file to test https://tinyurl.com/cmicnlslack420349 diff --git a/CPAC/utils/tests/configs/github_2144.yml b/CPAC/utils/tests/configs/github_2144.yml new file mode 100644 index 0000000000..4a645472b2 --- /dev/null +++ b/CPAC/utils/tests/configs/github_2144.yml @@ -0,0 +1,4 @@ +- site: site-1 + subject_id: 01 + unique_id: 02 + derivatives_dir: /fprep/sub-0151 diff --git a/CPAC/utils/tests/test_bids_utils.py b/CPAC/utils/tests/test_bids_utils.py index 00355f1201..61b381756c 100644 --- a/CPAC/utils/tests/test_bids_utils.py +++ b/CPAC/utils/tests/test_bids_utils.py @@ -16,6 +16,7 @@ # License along with C-PAC. If not, see . 
"""Tests for bids_utils.""" +from importlib import resources import os from subprocess import run @@ -23,11 +24,13 @@ import yaml from CPAC.utils.bids_utils import ( + _check_value_type, bids_gen_cpac_sublist, cl_strip_brackets, collect_bids_files_configs, create_cpac_data_config, load_cpac_data_config, + load_yaml_config, sub_list_filter_by_labels, ) from CPAC.utils.monitoring.custom_logging import getLogger @@ -107,6 +110,21 @@ def test_gen_bids_sublist(bids_dir, test_yml, creds_path, dbg=False): assert sublist +def test_load_data_config_with_ints() -> None: + """Check that C-PAC coerces sub- and ses- ints to strings.""" + data_config_file = resources.files("CPAC").joinpath( + "utils/tests/configs/github_2144.yml" + ) + # make sure there are ints in the test data + assert _check_value_type( + load_yaml_config(str(data_config_file), None, safe_load=True) + ) + # make sure there aren't ints when it's loaded through the loader + assert not _check_value_type( + load_cpac_data_config(str(data_config_file), None, None) + ) + + @pytest.mark.parametrize("t1w_label", ["acq-HCP", "acq-VNavNorm", "T1w", None]) @pytest.mark.parametrize( "bold_label", ["task-peer_run-1", "[task-peer_run-1 task-peer_run-2]", "bold", None] From ca193f594581c4d58b6c3358c74e795d7ebd5af3 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 4 Sep 2024 10:51:09 -0400 Subject: [PATCH 060/507] :white_check_mark: Fix `Traversable` import for minimum Python version --- CPAC/utils/tests/configs/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/tests/configs/__init__.py b/CPAC/utils/tests/configs/__init__.py index a02311ad9e..896c79bf69 100644 --- a/CPAC/utils/tests/configs/__init__.py +++ b/CPAC/utils/tests/configs/__init__.py @@ -1,7 +1,11 @@ """Configs for testing.""" from importlib import resources -from importlib.resources.abc import Traversable + +try: + from importlib.resources.abc import Traversable +except ModuleNotFoundError: # TODO: Remove this block once minimum Python version includes `importlib.resources.abc` + from importlib.abc import Traversable import yaml From a162b5026f2cbd79b92d2992a18eb969d271551a Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 5 Sep 2024 14:09:45 -0400 Subject: [PATCH 061/507] :recycle: Use YAML tags instead of unsafe loader --- CPAC/utils/bids_utils.py | 53 +++++++++++++++++++++-------- CPAC/utils/tests/test_bids_utils.py | 4 +-- 2 files changed, 39 insertions(+), 18 deletions(-) diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py index 9840badb23..73a74c148f 100755 --- a/CPAC/utils/bids_utils.py +++ b/CPAC/utils/bids_utils.py @@ -20,7 +20,7 @@ import os import re import sys -from typing import Any, Callable +from typing import Any, Callable, Optional from warnings import warn from botocore.exceptions import BotoCoreError @@ -1002,15 +1002,33 @@ def insert_entity(resource, key, value): return "_".join([*new_entities[0], f"{key}-{value}", *new_entities[1], suff]) -def load_yaml_config(config_filename: str, aws_input_creds, safe_load: bool = False): +def apply_modifications( + yaml_contents: str, modifications: Optional[list[Callable[[str], str]]] +) -> str: + """Apply modification functions to YAML contents""" + if modifications: + for modification in modifications: + yaml_contents = modification(yaml_contents) + return yaml_contents + + +def load_yaml_config( + config_filename: str, + aws_input_creds, + modifications: Optional[list[Callable[[str], str]]] = None, +) -> dict | list | str: + """Load a YAML config file, possibly 
from AWS, with modifications applied. + + `modifications` should be a list of functions that take a single string argument (the loaded YAML contents) and return a single string argument (the modified YAML contents). + """ if config_filename.lower().startswith("data:"): try: - header, encoded = config_filename.split(",", 1) - config_content = b64decode(encoded) - if safe_load: - return yaml.safe_load(config_content) - return yaml.load(config_content, Loader=yaml.loader.BaseLoader) - except: + _header, encoded = config_filename.split(",", 1) + config_content = apply_modifications( + b64decode(encoded).decode("utf-8"), modifications + ) + return yaml.safe_load(config_content) + except Exception: msg = f"Error! Could not find load config from data URI {config_filename}" raise SpecifiedBotoCoreError(msg=msg) @@ -1034,9 +1052,8 @@ def load_yaml_config(config_filename: str, aws_input_creds, safe_load: bool = Fa config_filename = os.path.realpath(config_filename) try: - if safe_load: - return yaml.safe_load(open(config_filename, "r")) - return yaml.load(open(config_filename, "r"), Loader=yaml.loader.BaseLoader) + with open(config_filename, "r") as _f: + return yaml.safe_load(apply_modifications(_f.read(), modifications)) except IOError: msg = f"Error! Could not find config file {config_filename}" raise FileNotFoundError(msg) @@ -1145,6 +1162,13 @@ def _check_value_type( ) +def coerce_data_config_strings(contents: str) -> str: + """Coerge `subject_id` and `unique_id` to be strings.""" + for key in ["subject_id: ", "unique_id: "]: + contents = contents.replace(key, f"{key}!!str ") + return contents + + def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds): """ Loads the file as a check to make sure it is available and readable. @@ -1162,10 +1186,9 @@ def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds) ------- list """ - sub_list: list[dict[str, str]] = load_yaml_config(data_config_file, aws_input_creds) - - if _check_value_type(sub_list, ["subject_id", "unique_id"], int, any): - sub_list = load_yaml_config(data_config_file, aws_input_creds, safe_load=False) + sub_list: list[dict[str, str]] = load_yaml_config( + data_config_file, aws_input_creds, modifications=[coerce_data_config_strings] + ) if participant_labels: sub_list = [ diff --git a/CPAC/utils/tests/test_bids_utils.py b/CPAC/utils/tests/test_bids_utils.py index 61b381756c..2b7267af94 100644 --- a/CPAC/utils/tests/test_bids_utils.py +++ b/CPAC/utils/tests/test_bids_utils.py @@ -116,9 +116,7 @@ def test_load_data_config_with_ints() -> None: "utils/tests/configs/github_2144.yml" ) # make sure there are ints in the test data - assert _check_value_type( - load_yaml_config(str(data_config_file), None, safe_load=True) - ) + assert _check_value_type(load_yaml_config(str(data_config_file), None)) # make sure there aren't ints when it's loaded through the loader assert not _check_value_type( load_cpac_data_config(str(data_config_file), None, None) From 01ee7c1796749cbc3e922fed18f5adfc56b4e8e6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 5 Sep 2024 14:15:33 -0400 Subject: [PATCH 062/507] :necktie: Use regex to not override existing YAML type declarations --- CPAC/utils/bids_utils.py | 4 ++-- CPAC/utils/tests/configs/github_2144.yml | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py index 73a74c148f..4ed9e45e7c 100755 --- a/CPAC/utils/bids_utils.py +++ b/CPAC/utils/bids_utils.py @@ -1165,8 +1165,8 @@ def 
_check_value_type( def coerce_data_config_strings(contents: str) -> str: """Coerge `subject_id` and `unique_id` to be strings.""" for key in ["subject_id: ", "unique_id: "]: - contents = contents.replace(key, f"{key}!!str ") - return contents + contents = re.sub(f"{key}(?!!!)", f"{key}!!str ", contents) + return contents.replace(": !!str !!", ": !!") def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds): diff --git a/CPAC/utils/tests/configs/github_2144.yml b/CPAC/utils/tests/configs/github_2144.yml index 4a645472b2..a7d405c8ea 100644 --- a/CPAC/utils/tests/configs/github_2144.yml +++ b/CPAC/utils/tests/configs/github_2144.yml @@ -2,3 +2,7 @@ subject_id: 01 unique_id: 02 derivatives_dir: /fprep/sub-0151 +- site: site-1 + subject_id: !!str 02 + unique_id: 02 + derivatives_dir: /fprep/sub-0151 From 4f2559752f210eab67f6a4c9349dd6dfb4cc91a5 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 5 Sep 2024 14:20:38 -0400 Subject: [PATCH 063/507] :loud_sound: Add data config `str` casting to CHANGELOG [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 781be04ee6..7c97267c7c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. +- Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. ### Fixed From e2a65bf09cbbf2fedd5bbdcdf87111ad122e59f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 00:02:57 +0000 Subject: [PATCH 064/507] :arrow_up: Bump cryptography from 42.0.3 to 43.0.1 Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.3 to 43.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.3...43.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 58afacfa6d..f54d4ba8bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,7 +40,7 @@ voluptuous==0.13.1 # the below are pinned specifically to match what the FSL installer installs botocore==1.31.4 charset-normalizer==3.1.0 -cryptography==42.0.3 +cryptography==43.0.1 h5py==3.8.0 importlib-metadata==6.8.0 lxml==4.9.2 From 73ddb147c0c267dfbf0c6592d3dda9996408fce2 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 17 Sep 2024 14:19:01 -0400 Subject: [PATCH 065/507] :bug: Fix header reading for bandpass filters --- CPAC/nuisance/bandpass.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py index c5dc0f170d..4693cd5da1 100644 --- a/CPAC/nuisance/bandpass.py +++ b/CPAC/nuisance/bandpass.py @@ -106,14 +106,14 @@ def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_perio img.to_filename(regressor_bandpassed_file) else: - with open(regressor_file, "r") as f: - header = [] - - # header wouldn't be longer than 5, right? 
I don't want to
-            # loop over the whole file
-            for i in range(5):
-                line = f.readline()
-                if line.startswith("#") or isinstance(line[0], str):
+        header = []
+        with open(regressor_file, "r") as _f:
+            # Each leading line that doesn't start with a number goes into the header
+            for line in _f.readlines():
+                try:
+                    float(line.split()[0])
+                    break
+                except ValueError:
                     header.append(line)

         # usecols=[list]

From d7bd9c219ca1db037844af9164aac14689a746c7 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Tue, 17 Sep 2024 16:38:08 -0400
Subject: [PATCH 066/507] :white_check_mark: Add unit test for reading bandpass
 1D file

---
 CPAC/nuisance/bandpass.py            | 33 ++++++++++++------
 CPAC/nuisance/tests/regressors.1D    | 15 +++++++++
 CPAC/nuisance/tests/test_bandpass.py | 48 ++++++++++++++++++++++++++++
 3 files changed, 84 insertions(+), 12 deletions(-)
 create mode 100644 CPAC/nuisance/tests/regressors.1D
 create mode 100644 CPAC/nuisance/tests/test_bandpass.py

diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py
index 4693cd5da1..451d4a5b9e 100644
--- a/CPAC/nuisance/bandpass.py
+++ b/CPAC/nuisance/bandpass.py
@@ -1,6 +1,8 @@
 import os
+from pathlib import Path

 import numpy as np
+from numpy.typing import NDArray
 import nibabel as nib
 from scipy.fftpack import fft, ifft

@@ -44,6 +46,22 @@ def ideal_bandpass(data, sample_period, bandpass_freqs):
     return np.real_if_close(ifft(f_data)[:sample_length])


+def read_1D(one_D: Path | str) -> tuple[list[str], NDArray]:
+    """Parse a header from a 1D file, returning that header and a Numpy Array."""
+    header = []
+    with open(one_D, "r") as _f:
+        # Each leading line that doesn't start with a number goes into the header
+        for line in _f.readlines():
+            try:
+                float(line.split()[0])
+                break
+            except ValueError:
+                header.append(line)
+
+    regressor = np.loadtxt(one_D, skiprows=len(header))
+    return header, regressor
+
+
 def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_period=None):
     """Performs ideal bandpass filtering on each voxel time-series.
@@ -106,18 +124,9 @@ def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_perio img.to_filename(regressor_bandpassed_file) else: - header = [] - with open(regressor_file, "r") as _f: - # Each leading line that doesn't start with a number goes into the header - for line in _f.readlines(): - try: - float(line.split()[0]) - break - except ValueError: - header.append(line) - - # usecols=[list] - regressor = np.loadtxt(regressor_file, skiprows=len(header)) + header: list[str] + regressor: NDArray + header, regressor = read_1D(regressor_file) Yc = regressor - np.tile(regressor.mean(0), (regressor.shape[0], 1)) Y_bp = np.zeros_like(Yc) diff --git a/CPAC/nuisance/tests/regressors.1D b/CPAC/nuisance/tests/regressors.1D new file mode 100644 index 0000000000..d55945bd4e --- /dev/null +++ b/CPAC/nuisance/tests/regressors.1D @@ -0,0 +1,15 @@ +# Extra header +# extra header +# C-PAC 1.8.7.dev1 +# Nuisance regressors: +# RotY RotYDelay RotYSq RotYDelaySq RotX RotXDelay RotXSq RotXDelaySq RotZ RotZDelay RotZSq RotZDelaySq Y YDelay YSq YDelaySq X XDelay XSq XDelaySq Z ZDelay ZSq ZDelaySq aCompCorDetrendPC0 aCompCorDetrendPC1 aCompCorDetrendPC2 aCompCorDetrendPC3 aCompCorDetrendPC4 +0.064503015618032941 0.000000000000000000 0.004160639023820202 0.000000000000000000 0.071612848897811346 0.000000000000000000 0.005128400127260760 0.000000000000000000 -0.045875642036314265 0.000000000000000000 0.002104574532244045 0.000000000000000000 0.132890000000000008 0.000000000000000000 0.017659752100000002 0.000000000000000000 0.014942199999999999 0.000000000000000000 0.000223269340840000 0.000000000000000000 0.000408556000000000 0.000000000000000000 0.000000166918005136 0.000000000000000000 -0.022348500000000000 0.024816700000000001 -0.096326200000000001 0.157762999999999987 -0.097873799999999997 +0.031640849390966043 0.064503015618032941 0.001001143350181796 0.004160639023820202 0.128928108975928074 0.071612848897811346 0.016622457284108785 0.005128400127260760 -0.067560891370646151 -0.045875642036314265 0.004564474042796250 0.002104574532244045 0.031627599999999999 0.132890000000000008 0.001000305081760000 0.017659752100000002 0.038095700000000003 0.014942199999999999 0.001451282358490000 0.000223269340840000 -0.005307810000000000 0.000408556000000000 0.000028172846996100 0.000000166918005136 -0.064876000000000003 -0.013603499999999999 0.009020350000000000 -0.160142000000000007 -0.177807999999999994 +0.014566182605406878 0.011659654350684051 0.001782025477654622 0.001708282485349496 0.087538262826358210 0.084814056613328720 0.003050410763897181 0.001983217145137512 -0.041453889502682612 -0.041248724781196566 0.000887045295189055 0.001102853114798172 0.024593061637466357 0.019123515563283400 -0.001171834437865083 -0.001702326740091272 0.013267008686230538 0.014908354440170480 -0.000023048542269668 0.000030800663864303 -0.003147503026503373 -0.002156489951271478 -0.000212523379574746 -0.000134571632225604 -0.005279020489008680 0.003309414394962159 0.006218425399968431 0.006926438427946187 0.031874911370701621 +0.023012917432044880 0.023641462459337223 0.001353826869739763 0.001428748088263631 0.128401517423642225 0.127907328597750475 0.002936077845255422 0.001732591121410621 -0.064041009402203836 -0.065349619535801984 -0.001376339705694537 -0.000867347717315630 0.055371528230890282 0.047838664356472604 -0.003939704578469714 -0.004413819725322955 0.008626921921677059 0.013521224060128565 -0.000524131399781458 -0.000509996162422567 0.001399646015426790 0.002426771079716165 -0.000697817034458711 
-0.000644064148730770 0.003453684797811343 0.004439728633043883 0.005528130051255496 -0.000681564743845684 0.027088427450170843 +0.025438893846313822 0.030058212923250879 0.000838561693597976 0.001085005134557843 0.158696217127646116 0.160188595362451003 0.002834979654468744 0.001871305030454243 -0.079495085073931035 -0.083080090516398086 -0.003568788021910289 -0.002826331376429190 0.082500133838064399 0.073831252771084988 -0.006214900864815498 -0.006543763203955914 0.000519334243296480 0.008630341137520037 -0.000923363158038725 -0.000927750776503564 0.005165821347348335 0.005851034226762506 -0.001054704872395450 -0.001043041584010332 0.012752740283469200 0.004786640061712925 0.012289830660907162 -0.008745532683606035 0.014261415118720363 +0.021743016120035281 0.029688950895877426 0.000290547599874028 0.000682055571198300 0.175549364989970313 0.178338230890874111 0.002486643991830800 0.002149192970833630 -0.085377454115175486 -0.091489126463240492 -0.005383312549059558 -0.004535185285883645 0.102288251365551003 0.094066918293276736 -0.007766221033112258 -0.007876677356441979 -0.010112433374632405 0.000319385240548675 -0.001198648271548705 -0.001193505340585474 0.008037366757553616 0.007980258888708817 -0.001242736103775270 -0.001273598198058523 0.020974706057590668 0.005751802778007228 0.025351389814577394 -0.017180756363741379 -0.003956879522184370 +0.013525050094767123 0.023039913400015079 -0.000213791695822321 0.000249432472712464 0.178794499964418374 0.182090614749512603 0.001668344371008412 0.002226367140418777 -0.081444170893389012 -0.089634493861210238 -0.006575553895215308 -0.005785817468059847 0.112188805335497160 0.106323654207989879 -0.008527087208204130 -0.008379970470761666 -0.021551792557092900 -0.010410526495855658 -0.001350988613004632 -0.001312369367927021 0.010150399365352503 0.009047754995696919 -0.001267065761949068 -0.001322015050183638 0.027086406796860162 0.008769045224622980 0.041260717228531141 -0.025783341088905919 -0.023130294003556602 +0.003710293144471088 0.012303012925884141 -0.000591683949645386 -0.000159272972606234 0.170628799324984620 0.173931286958495634 0.000283113801796188 0.001792439708046661 -0.069705778794223461 -0.078851018906234180 -0.007027047515815758 -0.006451875040969878 0.111350260801486828 0.109587738981351920 -0.008599721245876775 -0.008202102875302755 -0.031732073497397532 -0.021834007346710128 -0.001401093147591972 -0.001318145135788918 0.011803916636990694 0.009558331079300939 -0.001174308952196117 -0.001222014617445004 0.030766413414606002 0.014584179038094797 0.055050504861566943 -0.034070573320800129 -0.038211308729750156 +-0.004234132549489960 0.000832649040004215 -0.000775011343076728 -0.000476609472781693 0.154706450373287646 0.158080166174354941 -0.001594467727120446 0.000677513943272737 -0.053855672812893871 -0.062403402297765788 -0.006775880525707196 -0.006520249171448194 0.100795197589240160 0.104212055737311265 -0.008213823778438519 -0.007620791647640451 -0.038786879957853938 -0.031921725342639970 -0.001382244593213621 -0.001261079549196342 0.013326020461721926 0.010106105453167591 -0.001035584634549134 -0.001043215412680393 0.032184969062050033 0.022685626981519318 0.061584251842384724 -0.041209431181336478 -0.044960991839340970 +-0.007277482359979987 -0.007732524875134966 -0.000730772864804432 -0.000639221128232586 0.134982773383533428 0.139112331565989317 -0.003731084894624379 -0.001083447029356628 -0.038226479638264539 -0.044608858813448345 -0.006004861339980094 -0.006093216205261694 0.083174466753920082 
0.091804401122952045 -0.007653794404152313 -0.006957769820022970 -0.041563433510437883 -0.038870841259792399 -0.001331418827716966 -0.001192680917598046 0.014941405278128142 0.011168243606804554 -0.000922586836031674 -0.000868492699062700 0.031582380224112708 0.031133381053542616 0.057080842480777161 -0.046093261482679442 -0.041188612349529960 diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py new file mode 100644 index 0000000000..19daf42091 --- /dev/null +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -0,0 +1,48 @@ +# Copyright (C) 2022 - 2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Tests for bandpass filters.""" + +from importlib.resources import files +from importlib.resources.abc import Traversable +from pathlib import Path + +from numpy.typing import NDArray +import pytest + +from CPAC.nuisance.bandpass import read_1D + +RAW_ONE_D: Traversable = files("CPAC").joinpath("nuisance/tests/regressors.1D") + + +@pytest.mark.parametrize("start_line", list(range(6))) +def test_read_1D(start_line: int, tmp_path: Path) -> None: + """Test the correct number of rows are read when reading a 1D file.""" + regressor: Path = tmp_path / f"regressor_startAtL{start_line}.1D" + # create a regressor.1D file with (5 - ``start_line``) lines of header + with ( + RAW_ONE_D.open("r", encoding="utf-8") as _raw, + regressor.open("w", encoding="utf-8") as _test_file, + ): + for line in _raw.readlines()[start_line:]: + _test_file.write(line) + header: list[str] + data: NDArray + header, data = read_1D(regressor) + # should get the same array no matter how many lines of header + assert data.shape == (10, 29) + # all header lines should be captured + assert len(header) == 5 - start_line From 6a37466625b66fdb8f923dfaea137ed9bceffb9c Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 18 Sep 2024 10:27:25 -0400 Subject: [PATCH 067/507] :alien: Import `Traversable` from `importlib.abc` for older Python compatibility --- CPAC/nuisance/tests/test_bandpass.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 19daf42091..452b55d3c7 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -16,8 +16,8 @@ # License along with C-PAC. If not, see . 
"""Tests for bandpass filters.""" +from importlib.abc import Traversable from importlib.resources import files -from importlib.resources.abc import Traversable from pathlib import Path from numpy.typing import NDArray From dd8ea5d35d409d262cb86f31c78bf03c20387c1a Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 18 Sep 2024 12:52:29 -0400 Subject: [PATCH 068/507] :memo: Add bandpass header fix to CHANGELOG [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 781be04ee6..cb0f5a96b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs). - Restored `bids-validator` functionality. +- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. ### Removed From 68baf2e0714fd75040ac3f82cb87f0f9896104a9 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 23 Sep 2024 09:57:05 -0400 Subject: [PATCH 069/507] :bug: Replace empty-string `shell` with path to executable shell --- .../Dockerfiles/C-PAC.develop-jammy.Dockerfile | 3 ++- .../C-PAC.develop-lite-jammy.Dockerfile | 3 ++- CHANGELOG.md | 1 + CPAC/pipeline/cpac_runner.py | 11 +++++------ CPAC/pipeline/test/test_cpac_runner.py | 9 +++++++++ CPAC/pipeline/utils.py | 17 +++++++++++++++++ Dockerfile | 3 ++- variant-lite.Dockerfile | 3 ++- 8 files changed, 40 insertions(+), 10 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 838d8dcc4b..2fa4ae4a23 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -45,7 +45,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ - PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages + PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ + _SHELL=/bin/bash # set user WORKDIR /home/c-pac_user diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index b58801b519..8e76675dc4 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -46,7 +46,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ - PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages + PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ + _SHELL=/bin/bash # set user WORKDIR /home/c-pac_user diff --git a/CHANGELOG.md b/CHANGELOG.md index 781be04ee6..5dd1aaff1b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs). - Restored `bids-validator` functionality. +- Fixed empty `shell` variable in cluster run scripts. 
### Removed diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 0110281d5d..e5eef08138 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Run C-PAC.""" + from multiprocessing import Process import os from time import strftime @@ -23,6 +25,7 @@ import yaml from CPAC.longitudinal_pipeline.longitudinal_workflow import anat_longitudinal_wf +from CPAC.pipeline.utils import get_shell from CPAC.utils.configuration import check_pname, Configuration, set_subject from CPAC.utils.configuration.yaml_template import upgrade_pipeline_to_1_8 from CPAC.utils.ga import track_run @@ -100,10 +103,7 @@ def run_condor_jobs(c, config_file, subject_list_file, p_name): # Create and run script for CPAC to run on cluster def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): - """ - Function to build a SLURM batch job submission script and - submit it to the scheduler via 'sbatch'. - """ + """Build a batch job submission script and submit to the scheduler.""" # Import packages import getpass import re @@ -137,7 +137,6 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): time_limit = "%d:00:00" % hrs_limit # Batch file variables - shell = subprocess.getoutput("echo $SHELL") user_account = getpass.getuser() num_subs = len(sublist) @@ -174,7 +173,7 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): # Set up config dictionary config_dict = { "timestamp": timestamp, - "shell": shell, + "shell": get_shell(), "job_name": "CPAC_" + pipeline_config.pipeline_setup["pipeline_name"], "num_tasks": num_subs, "queue": pipeline_config.pipeline_setup["system_config"]["on_grid"]["SGE"][ diff --git a/CPAC/pipeline/test/test_cpac_runner.py b/CPAC/pipeline/test/test_cpac_runner.py index 7ee91f5125..eb7121a9b3 100644 --- a/CPAC/pipeline/test/test_cpac_runner.py +++ b/CPAC/pipeline/test/test_cpac_runner.py @@ -1,13 +1,22 @@ import os +from pathlib import Path import pkg_resources as p import pytest from CPAC.pipeline.cpac_pipeline import load_cpac_pipe_config from CPAC.pipeline.cpac_runner import run_T1w_longitudinal +from CPAC.pipeline.utils import get_shell from CPAC.utils.bids_utils import create_cpac_data_config +def test_shell() -> None: + """Test that ``get_shell`` returns a path to an executable BASH.""" + shell = Path(get_shell()) + assert shell.exists(), "No default shell found." + assert os.access(shell, os.X_OK), "Default shell not executable." + + @pytest.mark.skip(reason="not a pytest test") def test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id): sub_data_list = create_cpac_data_config( diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 39acb6429f..d135addc41 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -17,6 +17,9 @@ """C-PAC pipeline engine utilities.""" from itertools import chain +import os +import subprocess +from typing import Optional from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity @@ -24,6 +27,20 @@ MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs +def get_shell() -> str: + """Return the path to default shell.""" + shell: Optional[str] = subprocess.getoutput( + f"which $(ps -p {os.getppid()} -o comm=)" + ) + if not shell: + try: + shell = os.environ["_SHELL"] + except KeyError: + msg = "Shell command not found." 
+ raise EnvironmentError(msg) + return shell + + def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. diff --git a/Dockerfile b/Dockerfile index 838d8dcc4b..2fa4ae4a23 100644 --- a/Dockerfile +++ b/Dockerfile @@ -45,7 +45,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ - PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages + PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ + _SHELL=/bin/bash # set user WORKDIR /home/c-pac_user diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index b58801b519..8e76675dc4 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -46,7 +46,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ - PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages + PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ + _SHELL=/bin/bash # set user WORKDIR /home/c-pac_user From 964cb4b72d6b7f2fcc2deb5a280d9fc226d05167 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 23 Sep 2024 14:25:20 -0400 Subject: [PATCH 070/507] :white_check_mark: Make sure default shell in container is BASH --- CPAC/pipeline/test/test_cpac_runner.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/test/test_cpac_runner.py b/CPAC/pipeline/test/test_cpac_runner.py index eb7121a9b3..1e43a3e3b6 100644 --- a/CPAC/pipeline/test/test_cpac_runner.py +++ b/CPAC/pipeline/test/test_cpac_runner.py @@ -12,8 +12,9 @@ def test_shell() -> None: """Test that ``get_shell`` returns a path to an executable BASH.""" - shell = Path(get_shell()) - assert shell.exists(), "No default shell found." + shell: str = get_shell() + assert shell.lower().endswith("bash"), "Default shell isn't BASH?" + assert Path(shell).exists(), "No default shell found." assert os.access(shell, os.X_OK), "Default shell not executable." 
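A minimal sketch of the fallback chain that `get_shell()` from the two commits above relies on. It assumes a POSIX `ps` and the `_SHELL=/bin/bash` variable set in the Dockerfiles; the trailing `/bin/bash` default is illustrative only and not part of the committed code:

    import os
    import subprocess

    # Name the parent process, normally the shell that launched C-PAC, e.g. "bash".
    parent = subprocess.getoutput(f"ps -p {os.getppid()} -o comm=")
    # Resolve that command name to an executable path, e.g. "/bin/bash".
    shell = subprocess.getoutput(f"which {parent}")
    if not shell:
        # Inside the containers the parent may not be a shell at all, so fall
        # back to the _SHELL variable baked into the images.
        shell = os.environ.get("_SHELL", "/bin/bash")
    print(shell)

The resolved path is what `run_cpac_on_cluster` feeds into the `"shell"` key of `config_dict` when rendering the batch submission script.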
From b9867c9084509ad6d4be88ff2b8e239b08bf1878 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 16 Aug 2024 16:24:40 -0400 Subject: [PATCH 071/507] :heavy_plus_sign: orientation checks for resources added to PipeConfigs --- CPAC/pipeline/engine.py | 18 ++++++++++++++++-- CPAC/pipeline/utils.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index d7f53f7029..2890c9e858 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -35,7 +35,7 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set +from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set, check_all_orientations from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename @@ -65,6 +65,7 @@ ) + class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): if not rpool: @@ -2409,9 +2410,11 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): import pandas as pd import pkg_resources as p + import sys template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) + templates = [] for row in template_df.itertuples(): key = row.Key @@ -2511,6 +2514,18 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): "", f"{key}_config_ingress", ) + #check if val is a nifti file .nii.gz + if val.endswith('.nii.gz'): + templates.append([key, val]) + + table = check_all_orientations(templates,"RPI") + df = pd.DataFrame(table, columns = ['Resource', 'Path', 'Orientation']) + + # check if any of the values in Orientation column are not RPI + if not df[df['Orientation'] != 'RPI'].empty: + WFLOGGER.info(f"The following templates are not in RPI orientation: {df}") + sys.exit() + # templates, resampling from config """ template_keys = [ @@ -2596,7 +2611,6 @@ def _set_nested(attr, keys): ) cfg.set_nested(cfg, key, node) """ - return rpool diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 39acb6429f..a01e1a65d5 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -20,10 +20,41 @@ from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity +from nipype import Node, Workflow, Function +from CPAC.pipeline import nipype_pipeline_engine as pe +from nipype.interfaces import afni +from nipype.interfaces.afni import Info +import os +import pandas as pd MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs +def find_orientation(input_file): + import subprocess + cmd_3dinfo = [ + "3dinfo", + "-orient", input_file + ] + + orientation = subprocess.run(cmd_3dinfo, capture_output=True, text=True).stdout.strip().upper() + return orientation + +def check_all_orientations(input_images:list, desired_orientation:str="RPI"): + desired_orientation = desired_orientation.upper() + orientations = [] + find_orient = Node(Function(input_names=["input_file"], + output_names=["orientation"], + function=find_orientation), + name="find_orient") + + for key, image in input_images: + find_orient.inputs.input_file = image + orientation = find_orient.run().outputs.orientation + orientations.append([key, image, orientation]) + 
return orientations + + def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. From 81869e23c3ba41c32c3b05f1b478c2203a094389 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:29:02 -0400 Subject: [PATCH 072/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index a01e1a65d5..252cb075c5 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -53,7 +53,7 @@ def check_all_orientations(input_images:list, desired_orientation:str="RPI"): orientation = find_orient.run().outputs.orientation orientations.append([key, image, orientation]) return orientations - + def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. From 0a32a32d0fbb92fe047bdc2e0c40c9269cebeab1 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:29:16 -0400 Subject: [PATCH 073/507] Update CPAC/pipeline/engine.py Co-authored-by: Jon Clucas --- CPAC/pipeline/engine.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 2890c9e858..27b5ead94c 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -35,7 +35,12 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set, check_all_orientations +from CPAC.pipeline.utils import ( + check_all_orientations, + MOVEMENT_FILTER_KEYS, + name_fork, + source_set, +) from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename From d005b01d174a7588bd64207456865a509c98d56c Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:29:25 -0400 Subject: [PATCH 074/507] Update CPAC/pipeline/engine.py Co-authored-by: Jon Clucas --- CPAC/pipeline/engine.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 27b5ead94c..42f2d22a19 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2413,9 +2413,10 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well + import sys + import pandas as pd import pkg_resources as p - import sys template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) From d31b1d9b18c248a6336e16c405a39161c461ebbc Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:29:41 -0400 Subject: [PATCH 075/507] Update CPAC/pipeline/engine.py Co-authored-by: Jon Clucas --- CPAC/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 42f2d22a19..d11a4b6800 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2520,8 +2520,8 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): "", 
f"{key}_config_ingress", ) - #check if val is a nifti file .nii.gz - if val.endswith('.nii.gz'): + # check if val is a nifti file .nii.gz + if val.endswith(".nii.gz"): templates.append([key, val]) table = check_all_orientations(templates,"RPI") From 548067824b3e595286701b98892d37182bcfbdd9 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:29:57 -0400 Subject: [PATCH 076/507] Update CPAC/pipeline/engine.py Co-authored-by: Jon Clucas --- CPAC/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index d11a4b6800..66567e61ec 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2524,8 +2524,8 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): if val.endswith(".nii.gz"): templates.append([key, val]) - table = check_all_orientations(templates,"RPI") - df = pd.DataFrame(table, columns = ['Resource', 'Path', 'Orientation']) + table = check_all_orientations(templates, "RPI") + df = pd.DataFrame(table, columns=["Resource", "Path", "Orientation"]) # check if any of the values in Orientation column are not RPI if not df[df['Orientation'] != 'RPI'].empty: From b9659522ce6f5073c73449f6e71ac00ed2f4daa2 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:30:06 -0400 Subject: [PATCH 077/507] Update CPAC/pipeline/engine.py Co-authored-by: Jon Clucas --- CPAC/pipeline/engine.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 66567e61ec..60b2eaf470 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2528,9 +2528,10 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): df = pd.DataFrame(table, columns=["Resource", "Path", "Orientation"]) # check if any of the values in Orientation column are not RPI - if not df[df['Orientation'] != 'RPI'].empty: - WFLOGGER.info(f"The following templates are not in RPI orientation: {df}") - sys.exit() + other_orientation = df[df["Orientation"] != "RPI"] + if not other_orientation.empty: + msg = f"The following templates are not in RPI orientation: {other_orientation}" + OrientationError(msg) # templates, resampling from config """ From 9df6ab58e0a823d39dfa2508da55b717074e53fb Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:32:09 -0400 Subject: [PATCH 078/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 252cb075c5..e95e7ac8b7 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -43,10 +43,14 @@ def find_orientation(input_file): def check_all_orientations(input_images:list, desired_orientation:str="RPI"): desired_orientation = desired_orientation.upper() orientations = [] - find_orient = Node(Function(input_names=["input_file"], - output_names=["orientation"], - function=find_orientation), - name="find_orient") + find_orient = Node( + Function( + input_names=["input_file"], + output_names=["orientation"], + function=find_orientation, + ), + name="find_orient", + ) for key, image in input_images: find_orient.inputs.input_file = image From c0006cd686983eaf257d7982c316366fab55be03 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> 
Date: Fri, 23 Aug 2024 16:32:17 -0400 Subject: [PATCH 079/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index e95e7ac8b7..2d8a1ed34c 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -39,7 +39,8 @@ def find_orientation(input_file): orientation = subprocess.run(cmd_3dinfo, capture_output=True, text=True).stdout.strip().upper() return orientation - + + def check_all_orientations(input_images:list, desired_orientation:str="RPI"): desired_orientation = desired_orientation.upper() orientations = [] From edf6d991df545583915439f0ac02e8ba4fc48afc Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:32:26 -0400 Subject: [PATCH 080/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 2d8a1ed34c..55491c80c6 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -37,7 +37,11 @@ def find_orientation(input_file): "-orient", input_file ] - orientation = subprocess.run(cmd_3dinfo, capture_output=True, text=True).stdout.strip().upper() + orientation = ( + subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) + .stdout.strip() + .upper() + ) return orientation From 57d61334bbf7cbf64eb16afcac472750fccf1e16 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:32:36 -0400 Subject: [PATCH 081/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 55491c80c6..b9b5a43f74 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -18,14 +18,10 @@ from itertools import chain +from nipype import Function, Node + from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity -from nipype import Node, Workflow, Function -from CPAC.pipeline import nipype_pipeline_engine as pe -from nipype.interfaces import afni -from nipype.interfaces.afni import Info -import os -import pandas as pd MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs From 0f190d8faa5b21d61c5c4d59f872b35c647c0ad7 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:32:44 -0400 Subject: [PATCH 082/507] Update CPAC/pipeline/utils.py Co-authored-by: Jon Clucas --- CPAC/pipeline/utils.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index b9b5a43f74..90279c550d 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -28,10 +28,7 @@ def find_orientation(input_file): import subprocess - cmd_3dinfo = [ - "3dinfo", - "-orient", input_file - ] + cmd_3dinfo = ["3dinfo", "-orient", input_file] orientation = ( subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) From c5b4a1ef777d0f813ad533e8ce2e257f7913b9e8 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Mon, 9 Sep 2024 13:04:03 -0400 Subject: [PATCH 083/507] adding desired-orientation as a config key --- CPAC/pipeline/engine.py | 16 +++++---- CPAC/pipeline/schema.py | 1 + CPAC/pipeline/utils.py | 33 +++++++++++++++++-- 
.../configs/pipeline_config_blank.yml | 4 +++ 4 files changed, 44 insertions(+), 10 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 60b2eaf470..f56826a1d6 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -70,7 +70,6 @@ ) - class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): if not rpool: @@ -2413,14 +2412,14 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well - import sys - import pandas as pd import pkg_resources as p + from nibabel.orientations import OrientationError template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) templates = [] + desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): key = row.Key @@ -2524,11 +2523,11 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): if val.endswith(".nii.gz"): templates.append([key, val]) - table = check_all_orientations(templates, "RPI") + table = check_all_orientations(templates, desired_orientation) df = pd.DataFrame(table, columns=["Resource", "Path", "Orientation"]) - + # check if any of the values in Orientation column are not RPI - other_orientation = df[df["Orientation"] != "RPI"] + other_orientation = df[df["Orientation"] != desired_orientation] if not other_orientation.empty: msg = f"The following templates are not in RPI orientation: {other_orientation}" OrientationError(msg) @@ -2692,7 +2691,10 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) # output files with 4 different scans - + for x in rpool.get_entire_pool().keys(): + print(x) + import sys + sys.exit() return (wf, rpool) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 915cb47045..c47025bf3b 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -423,6 +423,7 @@ def sanitize(filename): "skip env check": Maybe(bool), # flag for skipping an environment check "pipeline_setup": { "pipeline_name": All(str, Length(min=1), sanitize), + "desired_orientation": In({"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"}), "output_directory": { "path": str, "source_outputs_dir": Maybe(str), diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 90279c550d..2353d36158 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -27,18 +27,45 @@ def find_orientation(input_file): + """Find the orientation of the input file. + + Parameters + ---------- + input_file : str + Input file path + + Returns + ------- + orientation : str + Orientation of the input file + """ import subprocess + cmd_3dinfo = ["3dinfo", "-orient", input_file] - orientation = ( + return ( subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) .stdout.strip() .upper() ) - return orientation -def check_all_orientations(input_images:list, desired_orientation:str="RPI"): +def check_all_orientations(input_images: list, desired_orientation: str = "RPI"): + """Check the orientation of all input images. 
+ + Parameters + ---------- + input_images : list + List of input images + desired_orientation : str + Desired orientation of the input images + + Returns + ------- + orientations : list + List of orientations of the input images + + """ desired_orientation = desired_orientation.upper() orientations = [] find_orient = Node( diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 7f09680fc6..b3cd9863c5 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -11,6 +11,10 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template + + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" + desired_orientation: RPI + output_directory: # Quality control outputs From a04d47d1526719295964849ad5f4f32fd170cce6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 17:11:59 +0000 Subject: [PATCH 084/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/engine.py | 1 + CPAC/pipeline/schema.py | 4 +++- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index f56826a1d6..0cb9517576 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2694,6 +2694,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): for x in rpool.get_entire_pool().keys(): print(x) import sys + sys.exit() return (wf, rpool) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index c47025bf3b..997c6267b8 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -423,7 +423,9 @@ def sanitize(filename): "skip env check": Maybe(bool), # flag for skipping an environment check "pipeline_setup": { "pipeline_name": All(str, Length(min=1), sanitize), - "desired_orientation": In({"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"}), + "desired_orientation": In( + {"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"} + ), "output_directory": { "path": str, "source_outputs_dir": Maybe(str), diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index b3cd9863c5..454d8add59 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -11,7 +11,7 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template - + # Desired orientation for the output data. 
"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" desired_orientation: RPI From f2be81a6c1a99d9897db761bff03dbe546e5b795 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 9 Sep 2024 17:03:42 -0400 Subject: [PATCH 085/507] reorienting all files before rpool initialization --- CPAC/pipeline/engine.py | 35 +++++++++++++++++++++++++------- CPAC/pipeline/utils.py | 44 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 71 insertions(+), 8 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 0cb9517576..6a6e631cee 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -68,7 +68,7 @@ read_json, write_output_json, ) - +from nibabel.orientations import OrientationError class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): @@ -2414,7 +2414,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): import pandas as pd import pkg_resources as p - from nibabel.orientations import OrientationError + template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) @@ -2523,7 +2523,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): if val.endswith(".nii.gz"): templates.append([key, val]) - table = check_all_orientations(templates, desired_orientation) + table = check_all_orientations(templates, desired_orientation, reorient=True) df = pd.DataFrame(table, columns=["Resource", "Path", "Orientation"]) # check if any of the values in Orientation column are not RPI @@ -2660,8 +2660,33 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): creds_path = None rpool = ResourcePool(name=unique_id, cfg=cfg) + + desired_orientation = cfg.pipeline_setup["desired_orientation"] if data_paths: + # check all data_paths and convert it to the desired_orientations + #Convert all anat to desired_orientation + if "anat" in data_paths: + anat = [] + for key in data_paths["anat"]: + anat.append([key, data_paths["anat"][key]]) + if anat: + try: + orientation = check_all_orientations(anat, desired_orientation, reorient=True) + except OrientationError as e: + raise e("Anatomical data is not in the desired orientation") + + #Convert all func to desired_orientation + if "func" in data_paths: + func = [] + for key in data_paths["func"]: + func.append([key, data_paths["func"][key]["scan"]]) + if func: + try: + orientation = check_all_orientations(func, desired_orientation, reorient=True) + except : + raise OrientationError("Functional data is not in the desired orientation") + # ingress outdir try: if ( @@ -2691,11 +2716,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) # output files with 4 different scans - for x in rpool.get_entire_pool().keys(): - print(x) - import sys - sys.exit() return (wf, rpool) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 2353d36158..ff373423e2 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -49,8 +49,48 @@ def find_orientation(input_file): .upper() ) +def reorient_image(input_file, orientation): + """Reorient the input image to the desired orientation. Replaces the original input_file with the reoriented image. 
-def check_all_orientations(input_images: list, desired_orientation: str = "RPI"): + Parameters + ---------- + input_file : str + Input image file path + orientation : str + Desired orientation of the input image + + """ + import os + import subprocess + + output_file = os.path.join( + os.path.dirname(input_file), + f"reoriented_{os.path.basename(input_file)}", + ) + cmd_3drefit = ["3drefit", "-deoblique", input_file] + cmd_3dresample = [ + "3dresample", + "-orient", + orientation, + "-prefix", + output_file, + "-inset", + input_file, + ] + cmd_mv = ["mv", output_file, input_file] + print(f"""+++ +Reorienting : {input_file} +to : {orientation} ++++""") + subprocess.run(cmd_3drefit, check=True) + subprocess.run(cmd_3dresample, check=True) + print(f"""+++Replacing {input_file} with reoriented image + """) + subprocess.run(cmd_mv, check=True) + return + + +def check_all_orientations(input_images: list, desired_orientation: str = "RPI", reorient=True): """Check the orientation of all input images. Parameters @@ -80,6 +120,8 @@ def check_all_orientations(input_images: list, desired_orientation: str = "RPI") for key, image in input_images: find_orient.inputs.input_file = image orientation = find_orient.run().outputs.orientation + if reorient and orientation != desired_orientation: + reorient_image(image, desired_orientation) orientations.append([key, image, orientation]) return orientations From 995163c3c0d561b0ae8908f5d94480a377d59ea9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 21:03:55 +0000 Subject: [PATCH 086/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/engine.py | 26 ++++++++++++++++---------- CPAC/pipeline/utils.py | 7 +++++-- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 6a6e631cee..ec7bbd0ff7 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -70,6 +70,7 @@ ) from nibabel.orientations import OrientationError + class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): if not rpool: @@ -2414,7 +2415,6 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): import pandas as pd import pkg_resources as p - template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) @@ -2660,33 +2660,39 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): creds_path = None rpool = ResourcePool(name=unique_id, cfg=cfg) - + desired_orientation = cfg.pipeline_setup["desired_orientation"] if data_paths: # check all data_paths and convert it to the desired_orientations - #Convert all anat to desired_orientation + # Convert all anat to desired_orientation if "anat" in data_paths: anat = [] for key in data_paths["anat"]: anat.append([key, data_paths["anat"][key]]) if anat: try: - orientation = check_all_orientations(anat, desired_orientation, reorient=True) + orientation = check_all_orientations( + anat, desired_orientation, reorient=True + ) except OrientationError as e: raise e("Anatomical data is not in the desired orientation") - - #Convert all func to desired_orientation + + # Convert all func to desired_orientation if "func" in data_paths: func = [] for key in data_paths["func"]: func.append([key, data_paths["func"][key]["scan"]]) if func: try: - orientation = check_all_orientations(func, desired_orientation, reorient=True) - 
except : - raise OrientationError("Functional data is not in the desired orientation") - + orientation = check_all_orientations( + func, desired_orientation, reorient=True + ) + except: + raise OrientationError( + "Functional data is not in the desired orientation" + ) + # ingress outdir try: if ( diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index ff373423e2..cd6856a20f 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -49,6 +49,7 @@ def find_orientation(input_file): .upper() ) + def reorient_image(input_file, orientation): """Reorient the input image to the desired orientation. Replaces the original input_file with the reoriented image. @@ -79,7 +80,7 @@ def reorient_image(input_file, orientation): ] cmd_mv = ["mv", output_file, input_file] print(f"""+++ -Reorienting : {input_file} +Reorienting : {input_file} to : {orientation} +++""") subprocess.run(cmd_3drefit, check=True) @@ -90,7 +91,9 @@ def reorient_image(input_file, orientation): return -def check_all_orientations(input_images: list, desired_orientation: str = "RPI", reorient=True): +def check_all_orientations( + input_images: list, desired_orientation: str = "RPI", reorient=True +): """Check the orientation of all input images. Parameters From 5aeebbb05b6b0e4870d9531375ff3247aa341865 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 16:34:43 -0400 Subject: [PATCH 087/507] moved orientation check and reorient if necessary to the wf --- CPAC/pipeline/engine.py | 102 ++++++++++++------------------------- CPAC/pipeline/utils.py | 108 +++++++++++++++------------------------- 2 files changed, 72 insertions(+), 138 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index ec7bbd0ff7..f1188b3b2b 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -36,7 +36,7 @@ from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.pipeline.utils import ( - check_all_orientations, + check_orientation, MOVEMENT_FILTER_KEYS, name_fork, source_set, @@ -68,7 +68,6 @@ read_json, write_output_json, ) -from nibabel.orientations import OrientationError class ResourcePool: @@ -2409,7 +2408,7 @@ def strip_template(data_label, dir_path, filename): return data_label, json -def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): +def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well @@ -2418,7 +2417,6 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) - templates = [] desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): @@ -2489,19 +2487,9 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): resampled_template.inputs.template_name = key resampled_template.inputs.tag = tag - # the set_data below is set up a little differently, because we are - # injecting and also over-writing already-existing entries - # other alternative would have been to ingress into the - # resampled_template node from the already existing entries, but we - # didn't do that here - rpool.set_data( - key, - resampled_template, - "resampled_template", - json_info, - "", - "template_resample", - ) # pipe_idx (after the blank json {}) 
should be the previous strat that you want deleted! because you're not connecting this the regular way, you have to do it manually + node = resampled_template + output = "resampled_template" + node_name = f"{key}_resampled_template" elif val: config_ingress = create_general_datasource(f"gather_{key}") @@ -2511,27 +2499,31 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): creds_path=creds_path, dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - rpool.set_data( - key, - config_ingress, - "outputspec.data", - json_info, - "", - f"{key}_config_ingress", - ) - # check if val is a nifti file .nii.gz - if val.endswith(".nii.gz"): - templates.append([key, val]) - - table = check_all_orientations(templates, desired_orientation, reorient=True) - df = pd.DataFrame(table, columns=["Resource", "Path", "Orientation"]) - - # check if any of the values in Orientation column are not RPI - other_orientation = df[df["Orientation"] != desired_orientation] - if not other_orientation.empty: - msg = f"The following templates are not in RPI orientation: {other_orientation}" - OrientationError(msg) - + node = config_ingress + output = "outputspec.data" + node_name = f"{key}_config_ingress" + + # check if the output is in desired orientation, if not reorient it + check_orient = pe.Node( + Function( + input_names=["input_file", "desired_orientation", "reorient"], + output_names=["orientation"], + function=check_orientation, + ), + name=f"check_orientation_{key}", + ) + wf.connect(node, output, check_orient, "input_file") + check_orient.inputs.desired_orientation = desired_orientation + check_orient.inputs.reorient = True + + rpool.set_data( + key, + check_orient, + "output_file", + json_info, + "", + f"check_orient-{node_name}-{key}", + ) # templates, resampling from config """ template_keys = [ @@ -2617,7 +2609,7 @@ def _set_nested(attr, keys): ) cfg.set_nested(cfg, key, node) """ - return rpool + return wf, rpool def initiate_rpool(wf, cfg, data_paths=None, part_id=None): @@ -2661,38 +2653,8 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): rpool = ResourcePool(name=unique_id, cfg=cfg) - desired_orientation = cfg.pipeline_setup["desired_orientation"] if data_paths: - # check all data_paths and convert it to the desired_orientations - # Convert all anat to desired_orientation - if "anat" in data_paths: - anat = [] - for key in data_paths["anat"]: - anat.append([key, data_paths["anat"][key]]) - if anat: - try: - orientation = check_all_orientations( - anat, desired_orientation, reorient=True - ) - except OrientationError as e: - raise e("Anatomical data is not in the desired orientation") - - # Convert all func to desired_orientation - if "func" in data_paths: - func = [] - for key in data_paths["func"]: - func.append([key, data_paths["func"][key]["scan"]]) - if func: - try: - orientation = check_all_orientations( - func, desired_orientation, reorient=True - ) - except: - raise OrientationError( - "Functional data is not in the desired orientation" - ) - # ingress outdir try: if ( @@ -2719,7 +2681,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): ) # grab any file paths from the pipeline config YAML - rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) + wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path) # output files with 4 different scans diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index cd6856a20f..cafbf3d686 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -18,21 +18,24 @@ 
from itertools import chain -from nipype import Function, Node - from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity +from CPAC.utils.monitoring import IFLOGGER MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs -def find_orientation(input_file): - """Find the orientation of the input file. +def check_orientation(input_file, desired_orientation, reorient=False): + """Find the orientation of the input file and reorient it if necessary. Parameters ---------- input_file : str Input file path + desired_orientation : str + Desired orientation of the input file + reorient : bool + Reorient the input file to the desired orientation Returns ------- @@ -43,11 +46,16 @@ def find_orientation(input_file): cmd_3dinfo = ["3dinfo", "-orient", input_file] - return ( + orientation = ( subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) .stdout.strip() .upper() ) + if orientation != desired_orientation and reorient: + output_file = reorient_image(input_file, desired_orientation) + else: + output_file = input_file + return output_file def reorient_image(input_file, orientation): @@ -60,73 +68,37 @@ def reorient_image(input_file, orientation): orientation : str Desired orientation of the input image - """ - import os - import subprocess - - output_file = os.path.join( - os.path.dirname(input_file), - f"reoriented_{os.path.basename(input_file)}", - ) - cmd_3drefit = ["3drefit", "-deoblique", input_file] - cmd_3dresample = [ - "3dresample", - "-orient", - orientation, - "-prefix", - output_file, - "-inset", - input_file, - ] - cmd_mv = ["mv", output_file, input_file] - print(f"""+++ -Reorienting : {input_file} -to : {orientation} -+++""") - subprocess.run(cmd_3drefit, check=True) - subprocess.run(cmd_3dresample, check=True) - print(f"""+++Replacing {input_file} with reoriented image - """) - subprocess.run(cmd_mv, check=True) - return - - -def check_all_orientations( - input_images: list, desired_orientation: str = "RPI", reorient=True -): - """Check the orientation of all input images. 
- - Parameters - ---------- - input_images : list - List of input images - desired_orientation : str - Desired orientation of the input images - Returns ------- - orientations : list - List of orientations of the input images - + output_file : str + Reoriented image file path """ - desired_orientation = desired_orientation.upper() - orientations = [] - find_orient = Node( - Function( - input_names=["input_file"], - output_names=["orientation"], - function=find_orientation, - ), - name="find_orient", - ) + try: + import os + import subprocess + + output_file = os.path.join( + os.path.dirname(input_file), + f"reoriented_{os.path.basename(input_file)}", + ) + cmd_3drefit = ["3drefit", "-deoblique", input_file] + cmd_3dresample = [ + "3dresample", + "-orient", + orientation, + "-prefix", + output_file, + "-inset", + input_file, + ] - for key, image in input_images: - find_orient.inputs.input_file = image - orientation = find_orient.run().outputs.orientation - if reorient and orientation != desired_orientation: - reorient_image(image, desired_orientation) - orientations.append([key, image, orientation]) - return orientations + IFLOGGER.info(f"""+++\nReorienting : {input_file}\nto : {orientation}\n+++""") + subprocess.run(cmd_3drefit, check=True) + subprocess.run(cmd_3dresample, check=True) + return output_file + except Exception as e: + IFLOGGER.error(f"Reorienting failed for {input_file} with error: {e}") + return input_file def name_fork(resource_idx, cfg, json_info, out_dct): From e8baa282476829d813de42e628d4b6b4580df26c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 20:35:00 +0000 Subject: [PATCH 088/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/engine.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index f1188b3b2b..586690ce72 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2653,7 +2653,6 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): rpool = ResourcePool(name=unique_id, cfg=cfg) - if data_paths: # ingress outdir try: From ef10e36db930fdaf1bfdd8d8440a81beb1583577 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 16:51:16 -0400 Subject: [PATCH 089/507] renaming output name correctly --- CPAC/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 586690ce72..77825a2d10 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2507,10 +2507,10 @@ def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): check_orient = pe.Node( Function( input_names=["input_file", "desired_orientation", "reorient"], - output_names=["orientation"], + output_names=["output_file"], function=check_orientation, ), - name=f"check_orientation_{key}", + name=f"check_orient_{key}", ) wf.connect(node, output, check_orient, "input_file") check_orient.inputs.desired_orientation = desired_orientation From 6bfd6806e47bf3394bb22f0f3eb8a35b96190f44 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 18 Sep 2024 13:23:58 -0400 Subject: [PATCH 090/507] handle for read-only template files --- CPAC/pipeline/engine.py | 46 +++++++++------- CPAC/pipeline/utils.py | 119 ++++++++++++++++++++++++---------------- 2 files changed, 96 insertions(+), 69 deletions(-) diff --git 
a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 77825a2d10..ac81436f57 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -2503,27 +2503,31 @@ def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): output = "outputspec.data" node_name = f"{key}_config_ingress" - # check if the output is in desired orientation, if not reorient it - check_orient = pe.Node( - Function( - input_names=["input_file", "desired_orientation", "reorient"], - output_names=["output_file"], - function=check_orientation, - ), - name=f"check_orient_{key}", - ) - wf.connect(node, output, check_orient, "input_file") - check_orient.inputs.desired_orientation = desired_orientation - check_orient.inputs.reorient = True - - rpool.set_data( - key, - check_orient, - "output_file", - json_info, - "", - f"check_orient-{node_name}-{key}", - ) + if val.endswith(".nii.gz"): + # check if the output is in desired orientation, if not reorient it + check_orient = pe.Node( + Function( + input_names=["input_file", "desired_orientation", "reorient"], + output_names=["output_file"], + function=check_orientation, + imports=["from CPAC.pipeline.utils import reorient_image"], + ), + name=f"check_orient_{key}", + ) + wf.connect(node, output, check_orient, "input_file") + check_orient.inputs.desired_orientation = desired_orientation + check_orient.inputs.reorient = True + + rpool.set_data( + key, + check_orient, + "output_file", + json_info, + "", + f"check_orient-{node_name}-{key}", + ) + else: + rpool.set_data(key, node, output, json_info, "", node_name) # templates, resampling from config """ template_keys = [ diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index cafbf3d686..251eece667 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -25,80 +25,103 @@ MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs -def check_orientation(input_file, desired_orientation, reorient=False): - """Find the orientation of the input file and reorient it if necessary. +def reorient_image(input_file, orientation): + """Reorient the input image to the desired orientation. Replaces the original input_file with the reoriented image. 
Parameters ---------- input_file : str - Input file path - desired_orientation : str - Desired orientation of the input file - reorient : bool - Reorient the input file to the desired orientation + Input image file path + orientation : str + Desired orientation of the input image Returns ------- - orientation : str - Orientation of the input file + output_file : str + Reoriented image file path """ + import os + import shutil import subprocess - cmd_3dinfo = ["3dinfo", "-orient", input_file] - - orientation = ( - subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) - .stdout.strip() - .upper() + output_file = os.path.join( + os.getcwd(), + f"reoriented_{os.path.basename(input_file)}", ) - if orientation != desired_orientation and reorient: - output_file = reorient_image(input_file, desired_orientation) - else: - output_file = input_file + # if output file exist delete it + if os.path.exists(output_file): + os.remove(output_file) + + # make a copy of the input file as temp file so that the original file is not modified + temp_file = os.path.join( + os.getcwd(), + f"temp_{os.path.basename(input_file)}", + ) + shutil.copy(input_file, temp_file) + + cmd_3drefit = ["3drefit", "-deoblique", temp_file] + cmd_3dresample = [ + "3dresample", + "-orient", + orientation, + "-prefix", + output_file, + "-inset", + temp_file, + ] + subprocess.run(cmd_3drefit, check=True) + subprocess.run(cmd_3dresample, check=True) + + # remove the temporary file + os.remove(temp_file) + return output_file -def reorient_image(input_file, orientation): - """Reorient the input image to the desired orientation. Replaces the original input_file with the reoriented image. +def check_orientation(input_file, desired_orientation, reorient=True): + """Find the orientation of the input file and reorient it if necessary. Does not modify the original input file. Parameters ---------- input_file : str - Input image file path - orientation : str - Desired orientation of the input image + Input file path + desired_orientation : str + Desired orientation of the input file + reorient : bool + Reorient the input file to the desired orientation Returns ------- output_file : str Reoriented image file path """ - try: - import os - import subprocess + import subprocess - output_file = os.path.join( - os.path.dirname(input_file), - f"reoriented_{os.path.basename(input_file)}", - ) - cmd_3drefit = ["3drefit", "-deoblique", input_file] - cmd_3dresample = [ - "3dresample", - "-orient", - orientation, - "-prefix", - output_file, - "-inset", - input_file, - ] + cmd_3dinfo = ["3dinfo", "-orient", input_file] - IFLOGGER.info(f"""+++\nReorienting : {input_file}\nto : {orientation}\n+++""") - subprocess.run(cmd_3drefit, check=True) - subprocess.run(cmd_3dresample, check=True) - return output_file - except Exception as e: - IFLOGGER.error(f"Reorienting failed for {input_file} with error: {e}") - return input_file + orientation = ( + subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) + .stdout.strip() + .upper() + ) + if orientation != desired_orientation and reorient: + IFLOGGER.info( + f"+++ Reorienting {input_file} from {orientation} to {desired_orientation} +++" + ) + try: + output_file = reorient_image(input_file, desired_orientation) + except Exception as e: + IFLOGGER.error( + f"Error in reorienting the image: {input_file}.\nCould not reorient the image to {desired_orientation}" + ) + IFLOGGER.error(f"Error: {e}") + output_file = input_file # return the original file ? 
+ else: + IFLOGGER.info( + f"+++ Orientation of {input_file} is already {desired_orientation} +++" + ) + output_file = input_file + return output_file def name_fork(resource_idx, cfg, json_info, out_dct): From e454cec6623459724d4cf6fe3797061a44b84e49 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 26 Sep 2024 14:46:28 -0400 Subject: [PATCH 091/507] adding reorient nodes in anat, func, freesurfer and template ingress nodes --- CPAC/anat_preproc/anat_preproc.py | 17 +---- CPAC/func_preproc/func_preproc.py | 16 +--- CPAC/pipeline/engine.py | 117 +++++++++++++++++++++--------- CPAC/pipeline/utils.py | 101 -------------------------- CPAC/utils/datasource.py | 3 +- 5 files changed, 91 insertions(+), 163 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 0f4e770f97..7d2c7db767 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1454,21 +1454,10 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("T1w") wf.connect(node, out, anat_deoblique, "in_file") - anat_reorient = pe.Node( - interface=afni.Resample(), - name=f"anat_reorient_{pipe_num}", - mem_gb=0, - mem_x=(0.0115, "in_file", "t"), - ) - anat_reorient.inputs.orientation = "RPI" - anat_reorient.inputs.outputtype = "NIFTI_GZ" - - wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") - outputs = { - "desc-preproc_T1w": (anat_reorient, "out_file"), - "desc-reorient_T1w": (anat_reorient, "out_file"), - "desc-head_T1w": (anat_reorient, "out_file"), + "desc-preproc_T1w": (anat_deoblique, "out_file"), + "desc-reorient_T1w": (anat_deoblique, "out_file"), + "desc-head_T1w": (anat_deoblique, "out_file"), } return (wf, outputs) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 7004b4f025..2007591c84 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -521,21 +521,9 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("bold") wf.connect(node, out, func_deoblique, "in_file") - func_reorient = pe.Node( - interface=afni_utils.Resample(), - name=f"func_reorient_{pipe_num}", - mem_gb=0, - mem_x=(0.0115, "in_file", "t"), - ) - - func_reorient.inputs.orientation = "RPI" - func_reorient.inputs.outputtype = "NIFTI_GZ" - - wf.connect(func_deoblique, "out_file", func_reorient, "in_file") - outputs = { - "desc-preproc_bold": (func_reorient, "out_file"), - "desc-reorient_bold": (func_reorient, "out_file"), + "desc-preproc_bold": (func_deoblique, "out_file"), + "desc-reorient_bold": (func_deoblique, "out_file"), } return (wf, outputs) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index ac81436f57..b1c05190a5 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -25,6 +25,7 @@ import warnings from nipype import config, logging +from nipype.interfaces import afni from nipype.interfaces.utility import Rename from CPAC.image_utils.spatial_smoothing import spatial_smoothing @@ -36,7 +37,6 @@ from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.pipeline.utils import ( - check_orientation, MOVEMENT_FILTER_KEYS, name_fork, source_set, @@ -1908,6 +1908,7 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): + desired_orientation = cfg.pipeline_setup["desired_orientation"] if 
"anat" not in data_paths: WFLOGGER.warning("No anatomical data present.") return rpool @@ -1931,7 +1932,17 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id dl_dir=cfg.pipeline_setup["working_directory"]["path"], img_type="anat", ) - rpool.set_data("T1w", anat_flow, "outputspec.anat", {}, "", "anat_ingress") + reorient = pe.Node( + interface=afni.Resample(), + name=f"reorient_T1w_{part_id}_{ses_id}", + ) + + reorient.inputs.orientation = desired_orientation + reorient.inputs.outputtype = "NIFTI_GZ" + + wf.connect(anat_flow, "outputspec.anat", reorient, "in_file") + + rpool.set_data("T1w", reorient, "out_file", {}, "", "anat_ingress") if "T2w" in data_paths["anat"]: anat_flow_T2 = create_anat_datasource(f"anat_T2w_gather_{part_id}_{ses_id}") @@ -1942,7 +1953,17 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id dl_dir=cfg.pipeline_setup["working_directory"]["path"], img_type="anat", ) - rpool.set_data("T2w", anat_flow_T2, "outputspec.anat", {}, "", "anat_ingress") + reorient = pe.Node( + interface=afni.Resample(), + name=f"reorient_T1w_{part_id}_{ses_id}", + ) + + reorient.inputs.orientation = desired_orientation + reorient.inputs.outputtype = "NIFTI_GZ" + + wf.connect(anat_flow_T2, "outputspec.anat", reorient, "in_file") + + rpool.set_data("T2w", reorient, "out_file", {}, "", "anat_ingress") if cfg.surface_analysis["freesurfer"]["ingress_reconall"]: rpool = ingress_freesurfer( @@ -1989,13 +2010,28 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): creds_path=data_paths["creds_path"], dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) + node = fs_ingress + out = "outputspec.data" + node_name = "freesurfer_config_ingress" + + if fs_path.endswith(".nii.gz" or ".nii"): + reorient = pe.Node( + interface=afni.Resample(), + name=f"reorient_fs_{part_id}_{ses_id}", + ) + reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + reorient.inputs.outputtype = "NIFTI_GZ" + wf.connect(fs_ingress, "outputspec.data", reorient, "in_file") + node = reorient + out = "out_file" + node_name = "reorient_fs" rpool.set_data( "freesurfer-subject-dir", - fs_ingress, - "outputspec.data", + node, + out, {}, "", - "freesurfer_config_ingress", + node_name, ) recon_outs = { @@ -2058,8 +2094,18 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id func_wf.get_node("inputnode").iterables = ("scan", list(func_paths_dct.keys())) rpool.set_data("subject", func_wf, "outputspec.subject", {}, "", "func_ingress") - rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") + reorient = pe.Node( + interface=afni.Resample(), + name=f"reorient_func_{part_id}_{ses_id}", + ) + reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + reorient.inputs.outputtype = "NIFTI_GZ" + wf.connect(func_wf, "outputspec.rest", reorient, "in_file") + rpool.set_data("bold", reorient, "out_file", {}, "", "func_ingress") + # rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") + rpool.set_data("scan", func_wf, "outputspec.scan", {}, "", "func_ingress") + rpool.set_data( "scan-params", func_wf, "outputspec.scan_params", {}, "", "scan_params_ingress" ) @@ -2474,7 +2520,13 @@ def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): resampled_template = pe.Node( Function( - input_names=["resolution", "template", "template_name", "tag"], + input_names=[ + "orientation", + "resolution", + "template", + "template_name", + "tag", + ], 
output_names=["resampled_template"],
                 function=resolve_resolution,
                 as_module=True,
             ),
             name="resampled_" + key,
         )

+        resampled_template.inputs.orientation = desired_orientation
         resampled_template.inputs.resolution = resolution
         resampled_template.inputs.template = val
         resampled_template.inputs.template_name = key
@@ -2489,7 +2542,7 @@ def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None):

         node = resampled_template
         output = "resampled_template"
-        node_name = f"{key}_resampled_template"
+        node_name = "template_resample"

     elif val:
         config_ingress = create_general_datasource(f"gather_{key}")
@@ -2503,31 +2556,29 @@ def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None):
         output = "outputspec.data"
         node_name = f"{key}_config_ingress"

-        if val.endswith(".nii.gz"):
-            # check if the output is in desired orientation, if not reorient it
-            check_orient = pe.Node(
-                Function(
-                    input_names=["input_file", "desired_orientation", "reorient"],
-                    output_names=["output_file"],
-                    function=check_orientation,
-                    imports=["from CPAC.pipeline.utils import reorient_image"],
-                ),
-                name=f"check_orient_{key}",
-            )
-            wf.connect(node, output, check_orient, "input_file")
-            check_orient.inputs.desired_orientation = desired_orientation
-            check_orient.inputs.reorient = True
-
+    if val.endswith((".nii", ".nii.gz")):
+        check_reorient = pe.Node(
+            interface=afni.Resample(),
+            name=f"reorient_{key}",
+        )
+
+        check_reorient.inputs.orientation = desired_orientation
+        check_reorient.inputs.outputtype = "NIFTI_GZ"
+
+        wf.connect(node, output, check_reorient, "in_file")
+        node = check_reorient
+        output = "out_file"
+        node_name = f"{key}_reorient"
+
+    rpool.set_data(
+        key,
+        node,
+        output,
+        json_info,
+        "",
+        node_name,
+    )

-        rpool.set_data(
-            key,
-            check_orient,
-            "output_file",
-            json_info,
-            "",
-            f"check_orient-{node_name}-{key}",
-        )
-        else:
-            rpool.set_data(key, node, output, json_info, "", node_name)
     # templates, resampling from config
     """
    template_keys = [
diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py
index 251eece667..f3b08be1ba 100644
--- a/CPAC/pipeline/utils.py
+++ b/CPAC/pipeline/utils.py
@@ -20,110 +20,9 @@
- - Parameters - ---------- - input_file : str - Input image file path - orientation : str - Desired orientation of the input image - - Returns - ------- - output_file : str - Reoriented image file path - """ - import os - import shutil - import subprocess - - output_file = os.path.join( - os.getcwd(), - f"reoriented_{os.path.basename(input_file)}", - ) - # if output file exist delete it - if os.path.exists(output_file): - os.remove(output_file) - - # make a copy of the input file as temp file so that the original file is not modified - temp_file = os.path.join( - os.getcwd(), - f"temp_{os.path.basename(input_file)}", - ) - shutil.copy(input_file, temp_file) - - cmd_3drefit = ["3drefit", "-deoblique", temp_file] - cmd_3dresample = [ - "3dresample", - "-orient", - orientation, - "-prefix", - output_file, - "-inset", - temp_file, - ] - subprocess.run(cmd_3drefit, check=True) - subprocess.run(cmd_3dresample, check=True) - - # remove the temporary file - os.remove(temp_file) - - return output_file - - -def check_orientation(input_file, desired_orientation, reorient=True): - """Find the orientation of the input file and reorient it if necessary. Does not modify the original input file. - - Parameters - ---------- - input_file : str - Input file path - desired_orientation : str - Desired orientation of the input file - reorient : bool - Reorient the input file to the desired orientation - - Returns - ------- - output_file : str - Reoriented image file path - """ - import subprocess - - cmd_3dinfo = ["3dinfo", "-orient", input_file] - - orientation = ( - subprocess.run(cmd_3dinfo, capture_output=True, text=True, check=False) - .stdout.strip() - .upper() - ) - if orientation != desired_orientation and reorient: - IFLOGGER.info( - f"+++ Reorienting {input_file} from {orientation} to {desired_orientation} +++" - ) - try: - output_file = reorient_image(input_file, desired_orientation) - except Exception as e: - IFLOGGER.error( - f"Error in reorienting the image: {input_file}.\nCould not reorient the image to {desired_orientation}" - ) - IFLOGGER.error(f"Error: {e}") - output_file = input_file # return the original file ? - else: - IFLOGGER.info( - f"+++ Orientation of {input_file} is already {desired_orientation} +++" - ) - output_file = input_file - return output_file - - def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 008e674c2d..eedc171561 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1156,7 +1156,7 @@ def res_string_to_tuple(resolution): return (float(resolution.replace("mm", "")),) * 3 -def resolve_resolution(resolution, template, template_name, tag=None): +def resolve_resolution(orientation, resolution, template, template_name, tag=None): """Resample a template to a given resolution.""" from nipype.interfaces import afni @@ -1203,6 +1203,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): resample.inputs.resample_mode = "Cu" resample.inputs.in_file = local_path resample.base_dir = "." 
+ resample.orientation = orientation resampled_template = resample.run() local_path = resampled_template.outputs.out_file From d47defb156b7a50b82c22c929a04f48dbd59492c Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 1 Oct 2024 10:13:10 -0400 Subject: [PATCH 092/507] fix a typo in orientation flag for resample --- CPAC/anat_preproc/anat_preproc.py | 17 +++-------------- CPAC/utils/datasource.py | 2 +- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 7d2c7db767..300eb6b878 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2251,21 +2251,10 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("T2w") wf.connect(node, out, T2_deoblique, "in_file") - T2_reorient = pe.Node( - interface=afni.Resample(), - name=f"T2_reorient_{pipe_num}", - mem_gb=0, - mem_x=(0.0115, "in_file", "t"), - ) - T2_reorient.inputs.orientation = "RPI" - T2_reorient.inputs.outputtype = "NIFTI_GZ" - - wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") - outputs = { - "desc-preproc_T2w": (T2_reorient, "out_file"), - "desc-reorient_T2w": (T2_reorient, "out_file"), - "desc-head_T2w": (T2_reorient, "out_file"), + "desc-preproc_T2w": (T2_deoblique, "out_file"), + "desc-reorient_T2w": (T2_deoblique, "out_file"), + "desc-head_T2w": (T2_deoblique, "out_file"), } return (wf, outputs) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index eedc171561..25adb1eeca 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1203,7 +1203,7 @@ def resolve_resolution(orientation, resolution, template, template_name, tag=Non resample.inputs.resample_mode = "Cu" resample.inputs.in_file = local_path resample.base_dir = "." - resample.orientation = orientation + resample.inputs.orientation = orientation resampled_template = resample.run() local_path = resampled_template.outputs.out_file From c9b69b93c2bc6d6eb11d55baabd4a82d75fa5d39 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 14:15:15 +0000 Subject: [PATCH 093/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index f3b08be1ba..39acb6429f 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -23,6 +23,7 @@ MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs + def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. 
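Taken together, patches 083-093 settle on a single pattern: read the desired code (``RPI`` by default) from ``pipeline_setup: desired_orientation``, ask AFNI what an image's current orientation is, and resample only on mismatch. Stripped of the nipype plumbing, the AFNI calls these diffs rely on reduce to roughly the following sketch (``3dinfo`` and ``3dresample`` must be on ``$PATH``; the function names and paths here are illustrative, not C-PAC's API):

    import subprocess

    def orientation_of(image: str) -> str:
        """Return an image's orientation code (e.g. "RPI") via 3dinfo."""
        info = subprocess.run(
            ["3dinfo", "-orient", image],
            capture_output=True, text=True, check=True,
        )
        return info.stdout.strip().upper()

    def ensure_orientation(image: str, out: str, desired: str = "RPI") -> str:
        """Write a copy of ``image`` resampled into ``desired`` orientation."""
        if orientation_of(image) == desired.upper():
            return image  # already in the desired orientation; nothing to do
        subprocess.run(
            ["3dresample", "-orient", desired, "-prefix", out, "-inset", image],
            check=True,
        )
        return out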
From d0a7c2aef02964eaada90680fee78c37b162823b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 1 Oct 2024 10:59:49 -0400 Subject: [PATCH 094/507] updating the orientation flag at other places where resolve_resolution is being used --- CPAC/longitudinal_pipeline/longitudinal_workflow.py | 1 + CPAC/registration/tests/mocks.py | 1 + CPAC/utils/test_mocks.py | 1 + 3 files changed, 3 insertions(+) diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 4229fc30c6..9134769d6c 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1204,6 +1204,7 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = config['desired_orientation'] strat_init.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index 18501c5a9a..4f35595abd 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -151,6 +151,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index 336488f318..ea16c0be36 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -235,6 +235,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")}
From 7246e449ae56ba116203bead402b95fa84629d17 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 1 Oct 2024 14:37:35 -0400 Subject: [PATCH 095/507] reverting adding reorient node at the very beginning and passing orientation set in config in the existing nodes for reorientations --- CPAC/anat_preproc/anat_preproc.py | 38 ++++++++++++++++----- CPAC/func_preproc/func_preproc.py | 16 +++++++-- CPAC/pipeline/engine.py | 56 ++++--------------------------- 3 files changed, 51 insertions(+), 59 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 300eb6b878..a561f8e077 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1233,7 +1233,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_brainmask.inputs.orientation = "RPI" + reorient_fs_brainmask.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ" wf.connect( @@ -1255,7 +1255,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_T1.inputs.orientation = "RPI" + reorient_fs_T1.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_T1.inputs.outputtype = "NIFTI_GZ" wf.connect(convert_fs_T1_to_nifti,
"out_file", reorient_fs_T1, "in_file") @@ -1454,10 +1454,21 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("T1w") wf.connect(node, out, anat_deoblique, "in_file") + anat_reorient = pe.Node( + interface=afni.Resample(), + name=f"anat_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + anat_reorient.inputs.outputtype = "NIFTI_GZ" + + wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") + outputs = { - "desc-preproc_T1w": (anat_deoblique, "out_file"), - "desc-reorient_T1w": (anat_deoblique, "out_file"), - "desc-head_T1w": (anat_deoblique, "out_file"), + "desc-preproc_T1w": (anat_reorient, "out_file"), + "desc-reorient_T1w": (anat_reorient, "out_file"), + "desc-head_T1w": (anat_reorient, "out_file"), } return (wf, outputs) @@ -2251,10 +2262,21 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("T2w") wf.connect(node, out, T2_deoblique, "in_file") + T2_reorient = pe.Node( + interface=afni.Resample(), + name=f"T2_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + T2_reorient.inputs.outputtype = "NIFTI_GZ" + + wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") + outputs = { - "desc-preproc_T2w": (T2_deoblique, "out_file"), - "desc-reorient_T2w": (T2_deoblique, "out_file"), - "desc-head_T2w": (T2_deoblique, "out_file"), + "desc-preproc_T2w": (T2_reorient, "out_file"), + "desc-reorient_T2w": (T2_reorient, "out_file"), + "desc-head_T2w": (T2_reorient, "out_file"), } return (wf, outputs) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 2007591c84..672bd2c985 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -521,9 +521,21 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("bold") wf.connect(node, out, func_deoblique, "in_file") + func_reorient = pe.Node( + interface=afni_utils.Resample(), + name=f"func_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + + func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + func_reorient.inputs.outputtype = "NIFTI_GZ" + + wf.connect(func_deoblique, "out_file", func_reorient, "in_file") + outputs = { - "desc-preproc_bold": (func_deoblique, "out_file"), - "desc-reorient_bold": (func_deoblique, "out_file"), + "desc-preproc_bold": (func_reorient, "out_file"), + "desc-reorient_bold": (func_reorient, "out_file"), } return (wf, outputs) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index b1c05190a5..187888284b 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1932,17 +1932,7 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id dl_dir=cfg.pipeline_setup["working_directory"]["path"], img_type="anat", ) - reorient = pe.Node( - interface=afni.Resample(), - name=f"reorient_T1w_{part_id}_{ses_id}", - ) - - reorient.inputs.orientation = desired_orientation - reorient.inputs.outputtype = "NIFTI_GZ" - - wf.connect(anat_flow, "outputspec.anat", reorient, "in_file") - - rpool.set_data("T1w", reorient, "out_file", {}, "", "anat_ingress") + rpool.set_data("T1w", anat_flow, "outputspec.anat", {}, "", "anat_ingress") if "T2w" in data_paths["anat"]: anat_flow_T2 = create_anat_datasource(f"anat_T2w_gather_{part_id}_{ses_id}") @@ -1953,17 
+1943,7 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id dl_dir=cfg.pipeline_setup["working_directory"]["path"], img_type="anat", ) - reorient = pe.Node( - interface=afni.Resample(), - name=f"reorient_T1w_{part_id}_{ses_id}", - ) - - reorient.inputs.orientation = desired_orientation - reorient.inputs.outputtype = "NIFTI_GZ" - - wf.connect(anat_flow_T2, "outputspec.anat", reorient, "in_file") - - rpool.set_data("T2w", reorient, "out_file", {}, "", "anat_ingress") + rpool.set_data("T2w", anat_flow_T2, "outputspec.anat", {}, "", "anat_ingress") if cfg.surface_analysis["freesurfer"]["ingress_reconall"]: rpool = ingress_freesurfer( @@ -2010,28 +1990,13 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): creds_path=data_paths["creds_path"], dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - node = fs_ingress - out = "outputspec.data" - node_name = "freesurfer_config_ingress" - - if fs_path.endswith(".nii.gz" or ".nii"): - reorient = pe.Node( - interface=afni.Resample(), - name=f"reorient_fs_{part_id}_{ses_id}", - ) - reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] - reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(fs_ingress, "outputspec.data", reorient, "in_file") - node = reorient - out = "out_file" - node_name = "reorient_fs" rpool.set_data( "freesurfer-subject-dir", - node, - out, + fs_ingress, + "outputspec.data", {}, "", - node_name, + "freesurfer_config_ingress", ) recon_outs = { @@ -2094,15 +2059,8 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id func_wf.get_node("inputnode").iterables = ("scan", list(func_paths_dct.keys())) rpool.set_data("subject", func_wf, "outputspec.subject", {}, "", "func_ingress") - reorient = pe.Node( - interface=afni.Resample(), - name=f"reorient_func_{part_id}_{ses_id}", - ) - reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] - reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_wf, "outputspec.rest", reorient, "in_file") - rpool.set_data("bold", reorient, "out_file", {}, "", "func_ingress") - # rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") + + rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") rpool.set_data("scan", func_wf, "outputspec.scan", {}, "", "func_ingress") From f3f3f98405f35c3b54b1d72b703dee5aece5d234 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:44:29 +0000 Subject: [PATCH 096/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/longitudinal_pipeline/longitudinal_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 9134769d6c..5c989675c1 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1204,7 +1204,7 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - resampled_template.inputs.orientation = config['desired_orientation'] + resampled_template.inputs.orientation = config["desired_orientation"] strat_init.update_resource_pool( {template_name: (resampled_template, "resampled_template")} From 
1a43750f59c3cadc1a91be37b0e19531663b5f56 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 1 Oct 2024 14:48:39 -0400 Subject: [PATCH 097/507] empty --- CPAC/pipeline/engine.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 187888284b..bf31c957f7 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1908,7 +1908,6 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - desired_orientation = cfg.pipeline_setup["desired_orientation"] if "anat" not in data_paths: WFLOGGER.warning("No anatomical data present.") return rpool @@ -2059,11 +2058,8 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id func_wf.get_node("inputnode").iterables = ("scan", list(func_paths_dct.keys())) rpool.set_data("subject", func_wf, "outputspec.subject", {}, "", "func_ingress") - rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") - rpool.set_data("scan", func_wf, "outputspec.scan", {}, "", "func_ingress") - rpool.set_data( "scan-params", func_wf, "outputspec.scan_params", {}, "", "scan_params_ingress" ) From 939c7a55ce70b26770b2b45e387a3cd6ce18259a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 13:08:39 -0400 Subject: [PATCH 098/507] updated tests --- CPAC/pipeline/test/test_engine.py | 2 +- CPAC/resources/tests/test_templates.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index c228fc3640..cf85f50dbe 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -90,7 +90,7 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): rpool = ResourcePool(name=unique_id, cfg=cfg) - rpool = ingress_pipeconfig_paths(cfg, rpool, sub_data_dct, unique_id) + wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, sub_data_dct, unique_id) rpool.gather_pipes(wf, cfg, all=True) diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index 13a4f72745..0d041930cb 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -32,8 +32,8 @@ def test_packaged_path_exists(pipeline): Check that all local templates are included in image at at least one resolution. """ - rpool = ingress_pipeconfig_paths( - Preconfiguration(pipeline), ResourcePool(), "pytest" + wf, rpool = ingress_pipeconfig_paths( + wf, Preconfiguration(pipeline), ResourcePool(), "pytest" ) for resource in rpool.rpool.values(): node = next(iter(resource.values())).get("data")[0] From 9df743cf3d1fa8f01a71d0f1f60aa96febb3a976 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 14:19:02 -0400 Subject: [PATCH 099/507] updated the changelog --- CHANGELOG.md | 3 +++ CPAC/resources/tests/test_templates.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb0f5a96b7..0fa0347214 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,10 +20,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `pyproject.toml` file with `[build-system]` defined. - [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). 
+- `desired_orientation` key in the blank config under `pipeline_setup`. +- Workflow (`wf`) parameter in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf` ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. +- Fixed-orientation-parameter `RPI` in resolve_resolution `freesurfer_fs_brain_connector`, `anatomical_init_T1`, `anatomical_init_T2`, `func_reorient` to take in whatever is set in the config `desired_orientation` field. ### Fixed diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index 0d041930cb..341bde6c27 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -24,6 +24,7 @@ from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool from CPAC.utils.configuration import Preconfiguration from CPAC.utils.datasource import get_highest_local_res +import nipype.pipeline.engine as pe @pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS) @@ -32,6 +33,7 @@ def test_packaged_path_exists(pipeline): """ Check that all local templates are included in image at at least one resolution. """ + wf = pe.Workflow(name="test") wf, rpool = ingress_pipeconfig_paths( wf, Preconfiguration(pipeline), ResourcePool(), "pytest" )
From 079b1f06a35848a79f26b3ee82e84d2848a81932 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 15:48:57 -0400 Subject: [PATCH 100/507] replaced RPI to take in config value in a few more places --- CPAC/anat_preproc/lesion_preproc.py | 3 ++- CPAC/func_preproc/func_preproc.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 07871ae32d..7a31bc3e2b 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -19,6 +19,7 @@ import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.configuration import Configuration as cfg from CPAC.utils.interfaces import Function @@ -133,7 +134,7 @@ def create_lesion_preproc(wf_name="lesion_preproc"): mem_x=(0.0115, "in_file", "t"), ) - lesion_reorient.inputs.orientation = "RPI" + lesion_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] lesion_reorient.inputs.outputtype = "NIFTI_GZ" preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file") diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 672bd2c985..3bac53cc87 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1290,7 +1290,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): mem_x=(0.0115, "in_file", "t"), ) - func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] func_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(func_deoblique, "out_file", func_reorient, "in_file")
From b36808361ac082950368237b2bfe38e1e1a6022e Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 20:18:07 -0400 Subject: [PATCH 101/507] adding cfg as parameter in lesion_preproc --- CHANGELOG.md | 2 +- CPAC/anat_preproc/lesion_preproc.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0fa0347214..2850e0460c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,7 +26,7 @@ and this project adheres to [Semantic
Versioning](https://semver.org/spec/v2.0.0 ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. -- Fixed-orientation-parameter `RPI` in resolve_resolution `freesurfer_fs_brain_connector`, `anatomical_init_T1`, `anatomical_init_T2`, `func_reorient` to take in whatever is set in the config `desired_orientation` field. +- Fixed-orientation-parameter `RPI` in resolve_resolution `freesurfer_fs_brain_connector`, `anatomical_init_T1`, `lesion_preproc`, `anatomical_init_T2`, `func_reorient` to take in whatever is set in the config `desired_orientation` field. ### Fixed diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 7a31bc3e2b..97a6828e59 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -19,7 +19,6 @@ import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.utils.configuration import Configuration as cfg from CPAC.utils.interfaces import Function @@ -59,7 +58,7 @@ def inverse_lesion(lesion_path): return lesion_out -def create_lesion_preproc(wf_name="lesion_preproc"): +def create_lesion_preproc(wf_name="lesion_preproc", cfg="RPI"): """Process lesions masks. Lesion mask file is deobliqued and reoriented in the same way as the T1 in From b36808361ac082950368237b2bfe38e1e1a6022e Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 20:22:21 -0400 Subject: [PATCH 102/507] sending only orientation not all cfg --- CPAC/anat_preproc/lesion_preproc.py | 4 ++-- CPAC/registration/registration.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 97a6828e59..c10f057954 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -58,7 +58,7 @@ def inverse_lesion(lesion_path): return lesion_out -def create_lesion_preproc(wf_name="lesion_preproc", cfg="RPI"): +def create_lesion_preproc(wf_name="lesion_preproc", orientation="RPI"): """Process lesions masks. 
Lesion mask file is deobliqued and reoriented in the same way as the T1 in @@ -133,7 +133,7 @@ def create_lesion_preproc(wf_name="lesion_preproc", cfg="RPI"): mem_x=(0.0115, "in_file", "t"), ) - lesion_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + lesion_reorient.inputs.orientation = orientation lesion_reorient.inputs.outputtype = "NIFTI_GZ" preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file") diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index da63e694e4..2214585d9c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1736,7 +1736,7 @@ def ANTs_registration_connector( "ANTs" ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}") + lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}", cfg.pipeline_setup["desired_orientation"]) wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") wf.connect( lesion_preproc, From 71dd070a699ceae56bf85d72b5ef45ddb33734ab Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 2 Oct 2024 20:32:00 -0400 Subject: [PATCH 103/507] fixed positions of args in lesion_preproc and added default RPI --- CPAC/anat_preproc/lesion_preproc.py | 6 ++++-- CPAC/registration/registration.py | 2 +- CPAC/registration/tests/test_registration.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index c10f057954..21628c97f0 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -58,7 +58,7 @@ def inverse_lesion(lesion_path): return lesion_out -def create_lesion_preproc(wf_name="lesion_preproc", orientation="RPI"): +def create_lesion_preproc(cfg=None, wf_name="lesion_preproc"): """Process lesions masks. 
Lesion mask file is deobliqued and reoriented in the same way as the T1 in @@ -133,7 +133,9 @@ def create_lesion_preproc(wf_name="lesion_preproc", orientation="RPI"): mem_x=(0.0115, "in_file", "t"), ) - lesion_reorient.inputs.orientation = orientation + lesion_reorient.inputs.orientation = ( + cfg.pipeline_setup["desired_orientation"] if cfg else "RPI" + ) lesion_reorient.inputs.outputtype = "NIFTI_GZ" preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file") diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 2214585d9c..1c6b6fa71a 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1736,7 +1736,7 @@ def ANTs_registration_connector( "ANTs" ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}", cfg.pipeline_setup["desired_orientation"]) + lesion_preproc = create_lesion_preproc(cfg, wf_name=f"lesion_preproc{symm}") wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") wf.connect( lesion_preproc, diff --git a/CPAC/registration/tests/test_registration.py b/CPAC/registration/tests/test_registration.py index 58741da445..d8e8228497 100755 --- a/CPAC/registration/tests/test_registration.py +++ b/CPAC/registration/tests/test_registration.py @@ -130,7 +130,7 @@ def test_registration_lesion(): anat_preproc.inputs.inputspec.anat = anat_file - lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc") + lesion_preproc = create_lesion_preproc(cfg, wf_name="lesion_preproc") lesion_preproc.inputs.inputspec.lesion = lesion_file From f4f91b30f3be6a45592f2e21b73060935f51b0da Mon Sep 17 00:00:00 2001 From: Biraj Date: Tue, 26 Mar 2024 16:13:08 +0000 Subject: [PATCH 104/507] Optimized cosine filter and added Doc strings --- CPAC/nuisance/utils/compcor.py | 65 ++++- .../configs/pipeline_config_blank.yml | 275 ++++++------------ 2 files changed, 155 insertions(+), 185 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 9de8e3a918..5fac927591 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,6 +91,7 @@ def cosine_filter( failure_mode="error", ): """ +<<<<<<< HEAD `cosine_filter` adapted from Nipype. https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 @@ -103,6 +104,39 @@ def cosine_filter( 'Repetition time (TR) of series (in sec) - derived from image header if unspecified' period_cut : float Minimum period (in sec) for DCT high-pass filter, nipype default value: 128. +======= + Apply cosine filter to a BOLD image. + + Parameters: + ----------- + input_image_path : str + Path to the BOLD image to be filtered. + timestep : float + Repetition time (TR) of the series (in seconds). Derived from image header if unspecified. + period_cut : float, optional + Minimum period (in seconds) for the DCT high-pass filter. Default value is 128. + remove_mean : bool, optional + Whether to remove the mean from the voxel time series before filtering. Default is True. + axis : int, optional + The axis along which to apply the filter. Default is -1 (last axis). + failure_mode : {'error', 'ignore'}, optional + Specifies how to handle failure modes. If set to 'error', the function raises an error. + If set to 'ignore', it returns the input data unchanged in case of failure. Default is 'error'. + + Returns: + -------- + cosfiltered_img : str + Path to the filtered BOLD image. 
+ + Notes: + ------ + The function applies a cosine filter to the input BOLD image using the discrete cosine transform (DCT) method. + It removes the low-frequency drift from the voxel time series. The filtered image is saved to disk. + + Adapted from nipype implementation. + + The function uses a generator to iterate over voxel time series to optimize memory usage. +>>>>>>> 57d70a9d9 (Optimized cosine filter and added Doc strings) """ # STATEMENT OF CHANGES: # This function is derived from sources licensed under the Apache-2.0 terms, @@ -114,6 +148,7 @@ def cosine_filter( # * Modified docstring to reflect local changes # * Updated style to match C-PAC codebase +<<<<<<< HEAD # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -135,6 +170,15 @@ def cosine_filter( from nipype.algorithms.confounds import _cosine_drift, _full_rank input_img = nib.load(input_image_path) +======= + def voxel_generator(): + for i in range(datashape[0]): + for j in range(datashape[1]): + for k in range(datashape[2]): + yield input_data[i, j, k, :] + + input_img = nb.load(input_image_path) +>>>>>>> 57d70a9d9 (Optimized cosine filter and added Doc strings) input_data = input_img.get_fdata() datashape = input_data.shape @@ -147,15 +191,24 @@ def cosine_filter( frametimes = timestep * np.arange(timepoints) X = _full_rank(_cosine_drift(period_cut, frametimes))[0] - betas = np.linalg.lstsq(X, input_data.T)[0] + output_data = np.zeros(input_data.shape) + + voxel_gen = voxel_generator() + + for i in range(datashape[0]): + print(f"calculating {i+1} of {datashape[0]} row of voxels") + for j in range(datashape[1]): + for k in range(datashape[2]): + voxel_time_series = next(voxel_gen) + betas = np.linalg.lstsq(X, voxel_time_series.T)[0] - if not remove_mean: - X = X[:, :-1] - betas = betas[:-1] + if not remove_mean: + X = X[:, :-1] + betas = betas[:-1] - residuals = input_data - X.dot(betas).T + residuals = voxel_time_series - X.dot(betas) - output_data = residuals.reshape(datashape) + output_data[i, j, k, :] = residuals hdr = input_img.header output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine) diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 7f09680fc6..2bb40559bd 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -7,15 +7,12 @@ # # Tip: This file can be edited manually with a text editor for quick modifications. pipeline_setup: - # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template output_directory: - # Quality control outputs quality_control: - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: Off @@ -56,7 +53,6 @@ pipeline_setup: output_tree: default system_config: - # Stop worklow execution on first crash? fail_fast: Off @@ -73,7 +69,6 @@ pipeline_setup: # A callback.log file from a previous run can be provided to estimate memory usage based on that run. observed_usage: - # Path to callback log file with previously observed usage. # Can be overridden with the commandline flag `--runtime_usage`. callback_log: @@ -91,7 +86,6 @@ pipeline_setup: # Only applies if you are running on a grid or compute cluster. resource_manager: SGE SGE: - # SGE Parallel Environment to use when running CPAC. # Only applies when you are running on a grid or compute cluster using SGE. 
parallel_environment: mpi_smp @@ -137,7 +131,6 @@ pipeline_setup: FSLDIR: FSLDIR working_directory: - # Directory where C-PAC should store temporary and intermediate files. # - This directory must be saved if you wish to re-run your pipeline from where you left off (if not completed). # - NOTE: As it stores all intermediate files, this directory can grow to become very @@ -153,17 +146,14 @@ pipeline_setup: remove_working_dir: On log_directory: - # Whether to write log details of the pipeline run to the logging files. run_logging: On path: /outputs/logs # Configuration options for logging visualizations of the workflow graph graphviz: - # Configuration for a graphviz visualization of the entire workflow. See https://fcp-indi.github.io/docs/developer/nodes#CPAC.pipeline.nipype_pipeline_engine.Workflow.write_graph for details about the various options entire_workflow: - # Whether to generate the graph visualization generate: Off @@ -177,7 +167,6 @@ pipeline_setup: simple_form: On crash_log_directory: - # Directory where CPAC should write crash logs. path: /outputs/crash @@ -185,7 +174,6 @@ pipeline_setup: run: Off Amazon-AWS: - # If setting the 'Output Directory' to an S3 bucket, insert the path to your AWS credentials file here. aws_output_bucket_credentials: @@ -193,14 +181,12 @@ pipeline_setup: s3_encryption: Off Debugging: - # Verbose developer messages. verbose: Off # PREPROCESSING # ------------- surface_analysis: - # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: Off @@ -277,7 +263,6 @@ anatomical_preproc: brain_extraction: run: Off FreeSurfer-BET: - # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz @@ -287,7 +272,6 @@ anatomical_preproc: # option parameters AFNI-3dSkullStrip: - # Output a mask volume instead of a skull-stripped volume. The mask volume containes 0 to 6, which represents voxel's postion. If set to True, C-PAC will use this output to generate anatomical brain mask for further analysis. mask_vol: Off @@ -352,7 +336,6 @@ anatomical_preproc: monkey: Off FSL-BET: - # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov: Off @@ -393,12 +376,10 @@ anatomical_preproc: vertical_gradient: 0.0 UNet: - # UNet model unet_model: s3://fcp-indi/resources/cpac/resources/Site-All-T-epoch_36.model niworkflows-ants: - # Template to be used during niworkflows-ants. # It is not necessary to change this path unless you intend to use a non-standard template. # niworkflows-ants Brain extraction template @@ -419,7 +400,6 @@ anatomical_preproc: # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: - # this is a fork option run: [Off] @@ -428,7 +408,6 @@ anatomical_preproc: # N4 bias field correction via ANTs n4_bias_field_correction: - # this is a fork option run: [Off] @@ -436,12 +415,10 @@ anatomical_preproc: shrink_factor: 2 segmentation: - # Automatically segment anatomical images into white matter, gray matter, # and CSF based on prior probability maps. 
run: Off tissue_segmentation: - # using: ['FSL-FAST', 'Template_Based', 'ANTs_Prior_Based', 'FreeSurfer'] # this is a fork point using: [FSL-FAST] @@ -449,12 +426,10 @@ segmentation: # option parameters FSL-FAST: thresholding: - # thresholding of the tissue segmentation probability maps # options: 'Auto', 'Custom' use: Auto Custom: - # Set the threshold value for the segmentation probability masks (CSF, White Matter, and Gray Matter) # The values remaining will become the binary tissue masks. # A good starting point is 0.95. @@ -468,7 +443,6 @@ segmentation: GM_threshold_value: 0.95 use_priors: - # Use template-space tissue priors to refine the binary tissue masks generated by segmentation. run: On @@ -490,7 +464,6 @@ segmentation: CSF_path: $priors_path/avg152T1_csf_bin.nii.gz Template_Based: - # These masks should be in the same space of your registration template, e.g. if # you choose 'EPI Template' , below tissue masks should also be EPI template tissue masks. # @@ -509,7 +482,6 @@ segmentation: CSF: $FSLDIR/data/standard/tissuepriors/2mm/avg152T1_csf_bin.nii.gz ANTs_Prior_Based: - # Generate white matter, gray matter, CSF masks based on antsJointLabelFusion # ANTs Prior-based Segmentation workflow that has shown optimal results for non-human primate data. # The atlas image assumed to be used in ANTs Prior-based Segmentation. @@ -544,7 +516,6 @@ segmentation: CSF_label: [24] FreeSurfer: - # Use mri_binarize --erode option to erode segmentation masks erode: 0 @@ -562,7 +533,6 @@ registration_workflows: run: Off registration: FSL-FNIRT: - # The resolution to which anatomical images should be transformed during registration. # This is the resolution at which processed anatomical files will be output. # specifically for monkey pipeline @@ -604,7 +574,6 @@ registration_workflows: # option parameters ANTs: - # If a lesion mask is available for a T1w image, use it to improve the ANTs' registration # ANTS registration only. 
use_lesion_mask: Off @@ -616,54 +585,54 @@ registration_workflows: - initial-moving-transform: initializationFeature: 0 - transforms: - - Rigid: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - Affine: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - SyN: - gradientStep: 0.1 - updateFieldVarianceInVoxelSpace: 3.0 - totalFieldVarianceInVoxelSpace: 0.0 - metric: - type: CC - metricWeight: 1 - radius: 4 - convergence: - iteration: 100x100x70x20 - convergenceThreshold: 1e-09 - convergenceWindowSize: 15 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 6x4x2x1 - use-histogram-matching: On - winsorize-image-intensities: - lowerQuantile: 0.01 - upperQuantile: 0.99 + - Rigid: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - Affine: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - SyN: + gradientStep: 0.1 + updateFieldVarianceInVoxelSpace: 3.0 + totalFieldVarianceInVoxelSpace: 0.0 + metric: + type: CC + metricWeight: 1 + radius: 4 + convergence: + iteration: 100x100x70x20 + convergenceThreshold: 1e-09 + convergenceWindowSize: 15 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 6x4x2x1 + use-histogram-matching: On + winsorize-image-intensities: + lowerQuantile: 0.01 + upperQuantile: 0.99 # Interpolation method for writing out transformed anatomical images. # Possible values: Linear, BSpline, LanczosWindowedSinc @@ -697,11 +666,9 @@ registration_workflows: functional_registration: coregistration: - # functional (BOLD/EPI) registration to anatomical (structural/T1) run: Off func_input_prep: - # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration reg_with_skull: Off @@ -709,19 +676,16 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Mean_Functional] Mean Functional: - # Run ANTs’ N4 Bias Field Correction on the input BOLD (EPI) # this can increase tissue contrast which may improve registration quality in some data n4_correct_func: Off Selected Functional Volume: - # Only for when 'Use as Functional-to-Anatomical Registration Input' is set to 'Selected Functional Volume'. #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. 
func_reg_input_volume: 0 boundary_based_registration: - # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -764,7 +728,6 @@ registration_workflows: arguments: func_registration_to_template: - # these options modify the application (to the functional data), not the calculation, of the # T1-to-template and EPI-to-template transforms calculated earlier during registration # apply the functional-to-template (T1 template) registration transform to the functional data @@ -773,7 +736,6 @@ registration_workflows: # apply the functional-to-template (EPI template) registration transform to the functional data run_EPI: Off apply_transform: - # options: 'default', 'abcd', 'single_step_resampling_from_stc', 'dcan_nhp' # 'default': apply func-to-anat and anat-to-template transforms on motion corrected functional image. # 'abcd': apply motion correction, func-to-anat and anat-to-template transforms on each of raw functional volume using FSL applywarp based on ABCD-HCP pipeline. @@ -782,7 +744,6 @@ registration_workflows: using: default output_resolution: - # The resolution (in mm) to which the preprocessed, registered functional timeseries outputs are written into. # NOTE: # selecting a 1 mm or 2 mm resolution might substantially increase your RAM needs- these resolutions should be selected with caution. @@ -799,7 +760,6 @@ registration_workflows: func_derivative_outputs: 3mm target_template: - # choose which template space to transform derivatives towards # using: ['T1_template', 'EPI_template'] # this is a fork point @@ -808,7 +768,6 @@ registration_workflows: # outputs and derivatives to template space using: [T1_template] T1_template: - # Standard Skull Stripped Template. Used as a reference image for functional registration. # This can be different than the template used as the reference/fixed for T1-to-template registration. T1w_brain_template_funcreg: $FSLDIR/data/standard/MNI152_T1_${func_resolution}_brain.nii.gz @@ -825,7 +784,6 @@ registration_workflows: T1w_template_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain.nii.gz EPI_template: - # EPI template for direct functional-to-template registration # (bypassing coregistration and the anatomical-to-template transforms) EPI_template_funcreg: s3://fcp-indi/resources/cpac/resources/epi_hbn.nii.gz @@ -837,13 +795,11 @@ registration_workflows: EPI_template_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain.nii.gz ANTs_pipelines: - # Interpolation method for writing out transformed functional images. # Possible values: Linear, BSpline, LanczosWindowedSinc interpolation: LanczosWindowedSinc FNIRT_pipelines: - # Interpolation method for writing out transformed functional images. # Possible values: trilinear, sinc, spline interpolation: sinc @@ -853,7 +809,6 @@ registration_workflows: identity_matrix: $FSLDIR/etc/flirtsch/ident.mat EPI_registration: - # directly register the mean functional to an EPI template # instead of applying the anatomical T1-to-template transform to the functional data that has been # coregistered to anatomical/T1 space @@ -871,7 +826,6 @@ registration_workflows: # EPI template mask. 
EPI_template_mask: ANTs: - # EPI registration configuration - synonymous with T1_registration # parameters under anatomical registration above parameters: @@ -880,61 +834,60 @@ registration_workflows: - initial-moving-transform: initializationFeature: 0 - transforms: - - Rigid: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - Affine: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - SyN: - gradientStep: 0.1 - updateFieldVarianceInVoxelSpace: 3.0 - totalFieldVarianceInVoxelSpace: 0.0 - metric: - type: CC - metricWeight: 1 - radius: 4 - convergence: - iteration: 100x100x70x20 - convergenceThreshold: 1e-09 - convergenceWindowSize: 15 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 6x4x2x1 - use-histogram-matching: On - winsorize-image-intensities: - lowerQuantile: 0.01 - upperQuantile: 0.99 + - Rigid: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - Affine: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - SyN: + gradientStep: 0.1 + updateFieldVarianceInVoxelSpace: 3.0 + totalFieldVarianceInVoxelSpace: 0.0 + metric: + type: CC + metricWeight: 1 + radius: 4 + convergence: + iteration: 100x100x70x20 + convergenceThreshold: 1e-09 + convergenceWindowSize: 15 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 6x4x2x1 + use-histogram-matching: On + winsorize-image-intensities: + lowerQuantile: 0.01 + upperQuantile: 0.99 # Interpolation method for writing out transformed EPI images. # Possible values: Linear, BSpline, LanczosWindowedSinc interpolation: LanczosWindowedSinc FSL-FNIRT: - # Configuration file to be used by FSL to set FNIRT parameters. # It is not necessary to change this path unless you intend to use custom FNIRT parameters or a non-standard template. fnirt_config: T1_2_MNI152_2mm @@ -950,12 +903,10 @@ registration_workflows: functional_preproc: run: Off update_header: - # Convert raw data from LPI to RPI run: On slice_timing_correction: - # Interpolate voxel time courses so they are sampled at the same time points. # this is a fork point # run: [On, Off] - this will run both and fork the pipeline @@ -971,7 +922,6 @@ functional_preproc: motion_estimates_and_correction: run: Off motion_estimates: - # calculate motion statistics BEFORE slice-timing correction calculate_motion_first: Off @@ -979,7 +929,6 @@ functional_preproc: calculate_motion_after: On motion_correction: - # using: ['3dvolreg', 'mcflirt'] # Forking is currently broken for this option. 
# Please use separate configs if you want to use each of 3dvolreg and mcflirt. @@ -988,7 +937,6 @@ functional_preproc: # option parameters AFNI-3dvolreg: - # This option is useful when aligning high-resolution datasets that may need more alignment than a few voxels. functional_volreg_twopass: On @@ -999,7 +947,6 @@ functional_preproc: motion_correction_reference_volume: 0 motion_estimate_filter: - # Filter physiological (respiration) artifacts from the head motion estimates. # Adapted from DCAN Labs filter. # https://www.ohsu.edu/school-of-medicine/developmental-cognition-and-neuroimaging-lab @@ -1010,12 +957,10 @@ functional_preproc: filters: [] distortion_correction: - # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] Blip-FSL-TOPUP: - # (approximate) resolution (in mm) of warp basis for the different sub-sampling levels, default 10 warpres: 10 @@ -1066,7 +1011,6 @@ functional_preproc: # option parameters PhaseDiff: - # Since the quality of the distortion heavily relies on the skull-stripping step, we provide a choice of method ('AFNI' for AFNI 3dSkullStrip or 'BET' for FSL BET). # Options: 'BET' or 'AFNI' fmap_skullstrip_option: BET @@ -1082,7 +1026,6 @@ functional_preproc: func_masking: run: Off FSL-BET: - # Set an intensity threshold to improve skull stripping performances of FSL BET on rodent scans. functional_mean_thr: run: Off @@ -1149,22 +1092,18 @@ functional_preproc: # this is a fork point using: [AFNI] Anatomical_Refined: - # Choose whether or not to dilate the anatomical mask if you choose 'Anatomical_Refined' as the functional masking option. It will dilate one voxel if enabled. anatomical_mask_dilation: Off generate_func_mean: - # Generate mean functional image run: Off normalize_func: - # Normalize functional image run: Off truncation: - # First timepoint to include in analysis. # Default is 0 (beginning of timeseries). # First timepoint selection in the scan parameters in the data configuration file, if present, will over-ride this selection. @@ -1178,7 +1117,6 @@ functional_preproc: stop_tr: scaling: - # Scale functional raw data, usually used in rodent pipeline run: Off @@ -1186,7 +1124,6 @@ functional_preproc: scaling_factor: 10 despiking: - # Run AFNI 3dDespike # this is a fork point # run: [On, Off] - this will run both and fork the pipeline @@ -1194,13 +1131,11 @@ functional_preproc: space: native coreg_prep: - # Generate sbref run: Off nuisance_corrections: 2-nuisance_regression: - # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -1212,7 +1147,6 @@ nuisance_corrections: # regression. regressor_masks: erode_anatomical_brain_mask: - # Erode brain mask in millimeters, default for brain mask is 30 mm # Brain erosion default is using millimeters. brain_mask_erosion_mm: @@ -1229,7 +1163,6 @@ nuisance_corrections: brain_erosion_mm: erode_csf: - # Erode cerebrospinal fluid mask in millimeters, default for cerebrospinal fluid is 30mm # Cerebrospinal fluid erosion default is using millimeters. csf_mask_erosion_mm: @@ -1246,7 +1179,6 @@ nuisance_corrections: csf_erosion_mm: erode_wm: - # Target volume ratio, if using erosion. # Default proportion is 0.6 for white matter mask. # If using erosion, using both proportion and millimeters is not recommended. @@ -1263,7 +1195,6 @@ nuisance_corrections: wm_erosion_mm: erode_gm: - # Target volume ratio, if using erosion. # If using erosion, using both proportion and millimeters is not recommended. 
gm_erosion_prop: @@ -1301,7 +1232,6 @@ nuisance_corrections: bandpass_filtering_order: After 1-ICA-AROMA: - # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -1314,7 +1244,6 @@ nuisance_corrections: timeseries_extraction: run: Off connectivity_matrix: - # Create a connectivity matrix from timeseries data # Options: # ['AFNI', 'Nilearn', 'ndmg'] @@ -1369,7 +1298,6 @@ timeseries_extraction: realignment: ROI_to_func amplitude_low_frequency_fluctuation: - # ALFF & f/ALFF # Calculate Amplitude of Low Frequency Fluctuations (ALFF) and fractional ALFF (f/ALFF) for all voxels. run: Off @@ -1384,7 +1312,6 @@ amplitude_low_frequency_fluctuation: lowpass_cutoff: [0.1] regional_homogeneity: - # ReHo # Calculate Regional Homogeneity (ReHo) for all voxels. run: Off @@ -1399,12 +1326,10 @@ regional_homogeneity: cluster_size: 27 voxel_mirrored_homotopic_connectivity: - # VMHC # Calculate Voxel-mirrored Homotopic Connectivity (VMHC) for all voxels. run: Off symmetric_registration: - # Included as part of the 'Image Resource Files' package available on the Install page of the User Guide. # It is not necessary to change this path unless you intend to use a non-standard symmetric template. T1w_brain_template_symmetric: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain_symmetric.nii.gz @@ -1435,7 +1360,6 @@ voxel_mirrored_homotopic_connectivity: dilated_symmetric_brain_mask_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain_mask_symmetric_dil.nii.gz network_centrality: - # Calculate Degree, Eigenvector Centrality, or Functional Connectivity Density. run: Off @@ -1446,7 +1370,6 @@ network_centrality: # Full path to a NIFTI file describing the mask. Centrality will be calculated for all voxels within the mask. template_specification_file: /cpac_templates/Mask_ABIDE_85Percent_GM.nii.gz degree_centrality: - # Enable/Disable degree centrality by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1465,7 +1388,6 @@ network_centrality: correlation_threshold: 0.001 eigenvector_centrality: - # Enable/Disable eigenvector centrality by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1484,7 +1406,6 @@ network_centrality: correlation_threshold: 0.001 local_functional_connectivity_density: - # Enable/Disable lFCD by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1503,7 +1424,6 @@ network_centrality: correlation_threshold: 0.6 longitudinal_template_generation: - # If you have multiple T1w's, you can generate your own run-specific custom # T1w template to serve as an intermediate to the standard template for # anatomical registration. @@ -1569,7 +1489,6 @@ post_processing: output: [z-scored] seed_based_correlation_analysis: - # SCA - Seed-Based Correlation Analysis # For each extracted ROI Average time series, CPAC will generate a whole-brain correlation map. # It should be noted that for a given seed/ROI, SCA maps for ROI Average time series will be the same. @@ -1595,7 +1514,6 @@ seed_based_correlation_analysis: # PACKAGE INTEGRATIONS # -------------------- PyPEER: - # Training of eye-estimation models. Commonly used for movies data/naturalistic viewing. run: Off @@ -1614,7 +1532,6 @@ PyPEER: # This is a file describing the stimulus locations from the calibration sequence. 
stimulus_path: minimal_nuisance_correction: - # PyPEER Minimal nuisance regression # Note: PyPEER employs minimal preprocessing - these choices do not reflect what runs in the main pipeline. # PyPEER uses non-nuisance-regressed data from the main pipeline.
From cc34d5669a3dac714b3d893f0ab34c37bc23f36a Mon Sep 17 00:00:00 2001 From: Biraj Date: Tue, 26 Mar 2024 19:42:01 +0000 Subject: [PATCH 105/507] reshaping removed in input array --- CPAC/nuisance/utils/compcor.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 5fac927591..d253d7acac 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -186,8 +186,6 @@ def voxel_generator(): if datashape[0] == 0 and failure_mode != "error": return input_data, np.array([]) - input_data = input_data.reshape((-1, timepoints)) - frametimes = timestep * np.arange(timepoints) X = _full_rank(_cosine_drift(period_cut, frametimes))[0]
From 4aad17ba37fe4e34c5bc7fe5b2df8c06288e5cd2 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Jul 2024 11:23:45 -0400 Subject: [PATCH 106/507] resolving conflict and rebase with develop --- CPAC/nuisance/utils/compcor.py | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index d253d7acac..62f6702db1 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,20 +91,10 @@ def cosine_filter( failure_mode="error", ): """ -<<<<<<< HEAD `cosine_filter` adapted from Nipype. https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 - - Parameters - ---------- - input_image_path : string - Bold image to be filtered. - timestep : float - 'Repetition time (TR) of series (in sec) - derived from image header if unspecified' - period_cut : float - Minimum period (in sec) for DCT high-pass filter, nipype default value: 128. -======= + Apply cosine filter to a BOLD image. Parameters: ----------- input_image_path : str Path to the BOLD image to be filtered. timestep : float Repetition time (TR) of the series (in seconds). Derived from image header if unspecified. period_cut : float, optional Minimum period (in seconds) for the DCT high-pass filter. Default value is 128. remove_mean : bool, optional Whether to remove the mean from the voxel time series before filtering. Default is True. axis : int, optional The axis along which to apply the filter. Default is -1 (last axis). failure_mode : {'error', 'ignore'}, optional Specifies how to handle failure modes. If set to 'error', the function raises an error. If set to 'ignore', it returns the input data unchanged in case of failure. Default is 'error'. Returns: -------- cosfiltered_img : str Path to the filtered BOLD image. @@ -126,7 +116,7 @@ def cosine_filter( Adapted from nipype implementation. The function uses a generator to iterate over voxel time series to optimize memory usage. ->>>>>>> 57d70a9d9 (Optimized cosine filter and added Doc strings) + """ # STATEMENT OF CHANGES: # This function is derived from sources licensed under the Apache-2.0 terms, @@ -148,7 +138,7 @@ def cosine_filter( # * Modified docstring to reflect local changes # * Updated style to match C-PAC codebase -<<<<<<< HEAD + # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -167,10 +157,7 @@ def cosine_filter( # Prior to release 0.12, Nipype was licensed under a BSD license.
# Modifications copyright (C) 2019 - 2024 C-PAC Developers - from nipype.algorithms.confounds import _cosine_drift, _full_rank - input_img = nib.load(input_image_path) -======= def voxel_generator(): for i in range(datashape[0]): for j in range(datashape[1]): @@ -178,7 +165,7 @@ def voxel_generator(): yield input_data[i, j, k, :] input_img = nb.load(input_image_path) ->>>>>>> 57d70a9d9 (Optimized cosine filter and added Doc strings) + input_data = input_img.get_fdata() datashape = input_data.shape From ed9ec31369a40f4b5e143b5cb985eeb49ea8c225 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Wed, 31 Jul 2024 12:20:05 -0400 Subject: [PATCH 107/507] nb changed to nib --- CPAC/nuisance/utils/compcor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 62f6702db1..87597fabd2 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -164,7 +164,7 @@ def voxel_generator(): for k in range(datashape[2]): yield input_data[i, j, k, :] - input_img = nb.load(input_image_path) + input_img = nib.load(input_image_path) input_data = input_img.get_fdata() From 8d6e20f713a73c4d14cf29de74577a9867e65a2b Mon Sep 17 00:00:00 2001 From: Biraj Date: Tue, 26 Mar 2024 16:13:08 +0000 Subject: [PATCH 108/507] Optimized cosine filter and added Doc strings --- CPAC/nuisance/utils/compcor.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 87597fabd2..b8acf6a77e 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,12 +91,6 @@ def cosine_filter( failure_mode="error", ): """ - `cosine_filter` adapted from Nipype. - - https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 - - Apply cosine filter to a BOLD image. - Parameters: ----------- input_image_path : str @@ -126,7 +120,6 @@ def cosine_filter( Adapted from nipype implementation. The function uses a generator to iterate over voxel time series to optimize memory usage. - """ # STATEMENT OF CHANGES: # This function is derived from sources licensed under the Apache-2.0 terms, @@ -166,6 +159,14 @@ def voxel_generator(): input_img = nib.load(input_image_path) + def voxel_generator(): + for i in range(datashape[0]): + for j in range(datashape[1]): + for k in range(datashape[2]): + yield input_data[i, j, k, :] + + input_img = nb.load(input_image_path) + input_data = input_img.get_fdata() datashape = input_data.shape From a8bd3d8f2818b99e4b727942daf410ab62f6c640 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 26 Aug 2024 16:31:58 -0400 Subject: [PATCH 109/507] rebasing to develop --- .../configs/pipeline_config_blank.yml | 275 ++++++++++++------ 1 file changed, 179 insertions(+), 96 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 2bb40559bd..7f09680fc6 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -7,12 +7,15 @@ # # Tip: This file can be edited manually with a text editor for quick modifications. pipeline_setup: + # Name for this pipeline configuration - useful for identification. 
# This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template output_directory: + # Quality control outputs quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: Off @@ -53,6 +56,7 @@ pipeline_setup: output_tree: default system_config: + # Stop worklow execution on first crash? fail_fast: Off @@ -69,6 +73,7 @@ pipeline_setup: # A callback.log file from a previous run can be provided to estimate memory usage based on that run. observed_usage: + # Path to callback log file with previously observed usage. # Can be overridden with the commandline flag `--runtime_usage`. callback_log: @@ -86,6 +91,7 @@ pipeline_setup: # Only applies if you are running on a grid or compute cluster. resource_manager: SGE SGE: + # SGE Parallel Environment to use when running CPAC. # Only applies when you are running on a grid or compute cluster using SGE. parallel_environment: mpi_smp @@ -131,6 +137,7 @@ pipeline_setup: FSLDIR: FSLDIR working_directory: + # Directory where C-PAC should store temporary and intermediate files. # - This directory must be saved if you wish to re-run your pipeline from where you left off (if not completed). # - NOTE: As it stores all intermediate files, this directory can grow to become very @@ -146,14 +153,17 @@ pipeline_setup: remove_working_dir: On log_directory: + # Whether to write log details of the pipeline run to the logging files. run_logging: On path: /outputs/logs # Configuration options for logging visualizations of the workflow graph graphviz: + # Configuration for a graphviz visualization of the entire workflow. See https://fcp-indi.github.io/docs/developer/nodes#CPAC.pipeline.nipype_pipeline_engine.Workflow.write_graph for details about the various options entire_workflow: + # Whether to generate the graph visualization generate: Off @@ -167,6 +177,7 @@ pipeline_setup: simple_form: On crash_log_directory: + # Directory where CPAC should write crash logs. path: /outputs/crash @@ -174,6 +185,7 @@ pipeline_setup: run: Off Amazon-AWS: + # If setting the 'Output Directory' to an S3 bucket, insert the path to your AWS credentials file here. aws_output_bucket_credentials: @@ -181,12 +193,14 @@ pipeline_setup: s3_encryption: Off Debugging: + # Verbose developer messages. verbose: Off # PREPROCESSING # ------------- surface_analysis: + # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: Off @@ -263,6 +277,7 @@ anatomical_preproc: brain_extraction: run: Off FreeSurfer-BET: + # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz @@ -272,6 +287,7 @@ anatomical_preproc: # option parameters AFNI-3dSkullStrip: + # Output a mask volume instead of a skull-stripped volume. The mask volume containes 0 to 6, which represents voxel's postion. If set to True, C-PAC will use this output to generate anatomical brain mask for further analysis. mask_vol: Off @@ -336,6 +352,7 @@ anatomical_preproc: monkey: Off FSL-BET: + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov: Off @@ -376,10 +393,12 @@ anatomical_preproc: vertical_gradient: 0.0 UNet: + # UNet model unet_model: s3://fcp-indi/resources/cpac/resources/Site-All-T-epoch_36.model niworkflows-ants: + # Template to be used during niworkflows-ants. # It is not necessary to change this path unless you intend to use a non-standard template. 
# niworkflows-ants Brain extraction template @@ -400,6 +419,7 @@ anatomical_preproc: # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: + # this is a fork option run: [Off] @@ -408,6 +428,7 @@ anatomical_preproc: # N4 bias field correction via ANTs n4_bias_field_correction: + # this is a fork option run: [Off] @@ -415,10 +436,12 @@ anatomical_preproc: shrink_factor: 2 segmentation: + # Automatically segment anatomical images into white matter, gray matter, # and CSF based on prior probability maps. run: Off tissue_segmentation: + # using: ['FSL-FAST', 'Template_Based', 'ANTs_Prior_Based', 'FreeSurfer'] # this is a fork point using: [FSL-FAST] @@ -426,10 +449,12 @@ segmentation: # option parameters FSL-FAST: thresholding: + # thresholding of the tissue segmentation probability maps # options: 'Auto', 'Custom' use: Auto Custom: + # Set the threshold value for the segmentation probability masks (CSF, White Matter, and Gray Matter) # The values remaining will become the binary tissue masks. # A good starting point is 0.95. @@ -443,6 +468,7 @@ segmentation: GM_threshold_value: 0.95 use_priors: + # Use template-space tissue priors to refine the binary tissue masks generated by segmentation. run: On @@ -464,6 +490,7 @@ segmentation: CSF_path: $priors_path/avg152T1_csf_bin.nii.gz Template_Based: + # These masks should be in the same space of your registration template, e.g. if # you choose 'EPI Template' , below tissue masks should also be EPI template tissue masks. # @@ -482,6 +509,7 @@ segmentation: CSF: $FSLDIR/data/standard/tissuepriors/2mm/avg152T1_csf_bin.nii.gz ANTs_Prior_Based: + # Generate white matter, gray matter, CSF masks based on antsJointLabelFusion # ANTs Prior-based Segmentation workflow that has shown optimal results for non-human primate data. # The atlas image assumed to be used in ANTs Prior-based Segmentation. @@ -516,6 +544,7 @@ segmentation: CSF_label: [24] FreeSurfer: + # Use mri_binarize --erode option to erode segmentation masks erode: 0 @@ -533,6 +562,7 @@ registration_workflows: run: Off registration: FSL-FNIRT: + # The resolution to which anatomical images should be transformed during registration. # This is the resolution at which processed anatomical files will be output. # specifically for monkey pipeline @@ -574,6 +604,7 @@ registration_workflows: # option parameters ANTs: + # If a lesion mask is available for a T1w image, use it to improve the ANTs' registration # ANTS registration only. 
use_lesion_mask: Off @@ -585,54 +616,54 @@ registration_workflows: - initial-moving-transform: initializationFeature: 0 - transforms: - - Rigid: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - Affine: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - SyN: - gradientStep: 0.1 - updateFieldVarianceInVoxelSpace: 3.0 - totalFieldVarianceInVoxelSpace: 0.0 - metric: - type: CC - metricWeight: 1 - radius: 4 - convergence: - iteration: 100x100x70x20 - convergenceThreshold: 1e-09 - convergenceWindowSize: 15 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 6x4x2x1 - use-histogram-matching: On - winsorize-image-intensities: - lowerQuantile: 0.01 - upperQuantile: 0.99 + - Rigid: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - Affine: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - SyN: + gradientStep: 0.1 + updateFieldVarianceInVoxelSpace: 3.0 + totalFieldVarianceInVoxelSpace: 0.0 + metric: + type: CC + metricWeight: 1 + radius: 4 + convergence: + iteration: 100x100x70x20 + convergenceThreshold: 1e-09 + convergenceWindowSize: 15 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 6x4x2x1 + use-histogram-matching: On + winsorize-image-intensities: + lowerQuantile: 0.01 + upperQuantile: 0.99 # Interpolation method for writing out transformed anatomical images. # Possible values: Linear, BSpline, LanczosWindowedSinc @@ -666,9 +697,11 @@ registration_workflows: functional_registration: coregistration: + # functional (BOLD/EPI) registration to anatomical (structural/T1) run: Off func_input_prep: + # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration reg_with_skull: Off @@ -676,16 +709,19 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Mean_Functional] Mean Functional: + # Run ANTs’ N4 Bias Field Correction on the input BOLD (EPI) # this can increase tissue contrast which may improve registration quality in some data n4_correct_func: Off Selected Functional Volume: + # Only for when 'Use as Functional-to-Anatomical Registration Input' is set to 'Selected Functional Volume'. #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. 
func_reg_input_volume: 0 boundary_based_registration: + # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -728,6 +764,7 @@ registration_workflows: arguments: func_registration_to_template: + # these options modify the application (to the functional data), not the calculation, of the # T1-to-template and EPI-to-template transforms calculated earlier during registration # apply the functional-to-template (T1 template) registration transform to the functional data @@ -736,6 +773,7 @@ registration_workflows: # apply the functional-to-template (EPI template) registration transform to the functional data run_EPI: Off apply_transform: + # options: 'default', 'abcd', 'single_step_resampling_from_stc', 'dcan_nhp' # 'default': apply func-to-anat and anat-to-template transforms on motion corrected functional image. # 'abcd': apply motion correction, func-to-anat and anat-to-template transforms on each of raw functional volume using FSL applywarp based on ABCD-HCP pipeline. @@ -744,6 +782,7 @@ registration_workflows: using: default output_resolution: + # The resolution (in mm) to which the preprocessed, registered functional timeseries outputs are written into. # NOTE: # selecting a 1 mm or 2 mm resolution might substantially increase your RAM needs- these resolutions should be selected with caution. @@ -760,6 +799,7 @@ registration_workflows: func_derivative_outputs: 3mm target_template: + # choose which template space to transform derivatives towards # using: ['T1_template', 'EPI_template'] # this is a fork point @@ -768,6 +808,7 @@ registration_workflows: # outputs and derivatives to template space using: [T1_template] T1_template: + # Standard Skull Stripped Template. Used as a reference image for functional registration. # This can be different than the template used as the reference/fixed for T1-to-template registration. T1w_brain_template_funcreg: $FSLDIR/data/standard/MNI152_T1_${func_resolution}_brain.nii.gz @@ -784,6 +825,7 @@ registration_workflows: T1w_template_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain.nii.gz EPI_template: + # EPI template for direct functional-to-template registration # (bypassing coregistration and the anatomical-to-template transforms) EPI_template_funcreg: s3://fcp-indi/resources/cpac/resources/epi_hbn.nii.gz @@ -795,11 +837,13 @@ registration_workflows: EPI_template_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain.nii.gz ANTs_pipelines: + # Interpolation method for writing out transformed functional images. # Possible values: Linear, BSpline, LanczosWindowedSinc interpolation: LanczosWindowedSinc FNIRT_pipelines: + # Interpolation method for writing out transformed functional images. # Possible values: trilinear, sinc, spline interpolation: sinc @@ -809,6 +853,7 @@ registration_workflows: identity_matrix: $FSLDIR/etc/flirtsch/ident.mat EPI_registration: + # directly register the mean functional to an EPI template # instead of applying the anatomical T1-to-template transform to the functional data that has been # coregistered to anatomical/T1 space @@ -826,6 +871,7 @@ registration_workflows: # EPI template mask. 
EPI_template_mask: ANTs: + # EPI registration configuration - synonymous with T1_registration # parameters under anatomical registration above parameters: @@ -834,60 +880,61 @@ registration_workflows: - initial-moving-transform: initializationFeature: 0 - transforms: - - Rigid: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - Affine: - gradientStep: 0.1 - metric: - type: MI - metricWeight: 1 - numberOfBins: 32 - samplingStrategy: Regular - samplingPercentage: 0.25 - convergence: - iteration: 1000x500x250x100 - convergenceThreshold: 1e-08 - convergenceWindowSize: 10 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 8x4x2x1 - use-histogram-matching: On - - SyN: - gradientStep: 0.1 - updateFieldVarianceInVoxelSpace: 3.0 - totalFieldVarianceInVoxelSpace: 0.0 - metric: - type: CC - metricWeight: 1 - radius: 4 - convergence: - iteration: 100x100x70x20 - convergenceThreshold: 1e-09 - convergenceWindowSize: 15 - smoothing-sigmas: 3.0x2.0x1.0x0.0 - shrink-factors: 6x4x2x1 - use-histogram-matching: On - winsorize-image-intensities: - lowerQuantile: 0.01 - upperQuantile: 0.99 + - Rigid: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - Affine: + gradientStep: 0.1 + metric: + type: MI + metricWeight: 1 + numberOfBins: 32 + samplingStrategy: Regular + samplingPercentage: 0.25 + convergence: + iteration: 1000x500x250x100 + convergenceThreshold: 1e-08 + convergenceWindowSize: 10 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 8x4x2x1 + use-histogram-matching: On + - SyN: + gradientStep: 0.1 + updateFieldVarianceInVoxelSpace: 3.0 + totalFieldVarianceInVoxelSpace: 0.0 + metric: + type: CC + metricWeight: 1 + radius: 4 + convergence: + iteration: 100x100x70x20 + convergenceThreshold: 1e-09 + convergenceWindowSize: 15 + smoothing-sigmas: 3.0x2.0x1.0x0.0 + shrink-factors: 6x4x2x1 + use-histogram-matching: On + winsorize-image-intensities: + lowerQuantile: 0.01 + upperQuantile: 0.99 # Interpolation method for writing out transformed EPI images. # Possible values: Linear, BSpline, LanczosWindowedSinc interpolation: LanczosWindowedSinc FSL-FNIRT: + # Configuration file to be used by FSL to set FNIRT parameters. # It is not necessary to change this path unless you intend to use custom FNIRT parameters or a non-standard template. fnirt_config: T1_2_MNI152_2mm @@ -903,10 +950,12 @@ registration_workflows: functional_preproc: run: Off update_header: + # Convert raw data from LPI to RPI run: On slice_timing_correction: + # Interpolate voxel time courses so they are sampled at the same time points. # this is a fork point # run: [On, Off] - this will run both and fork the pipeline @@ -922,6 +971,7 @@ functional_preproc: motion_estimates_and_correction: run: Off motion_estimates: + # calculate motion statistics BEFORE slice-timing correction calculate_motion_first: Off @@ -929,6 +979,7 @@ functional_preproc: calculate_motion_after: On motion_correction: + # using: ['3dvolreg', 'mcflirt'] # Forking is currently broken for this option. 
# Please use separate configs if you want to use each of 3dvolreg and mcflirt. @@ -937,6 +988,7 @@ functional_preproc: # option parameters AFNI-3dvolreg: + # This option is useful when aligning high-resolution datasets that may need more alignment than a few voxels. functional_volreg_twopass: On @@ -947,6 +999,7 @@ functional_preproc: motion_correction_reference_volume: 0 motion_estimate_filter: + # Filter physiological (respiration) artifacts from the head motion estimates. # Adapted from DCAN Labs filter. # https://www.ohsu.edu/school-of-medicine/developmental-cognition-and-neuroimaging-lab @@ -957,10 +1010,12 @@ functional_preproc: filters: [] distortion_correction: + # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] Blip-FSL-TOPUP: + # (approximate) resolution (in mm) of warp basis for the different sub-sampling levels, default 10 warpres: 10 @@ -1011,6 +1066,7 @@ functional_preproc: # option parameters PhaseDiff: + # Since the quality of the distortion heavily relies on the skull-stripping step, we provide a choice of method ('AFNI' for AFNI 3dSkullStrip or 'BET' for FSL BET). # Options: 'BET' or 'AFNI' fmap_skullstrip_option: BET @@ -1026,6 +1082,7 @@ functional_preproc: func_masking: run: Off FSL-BET: + # Set an intensity threshold to improve skull stripping performances of FSL BET on rodent scans. functional_mean_thr: run: Off @@ -1092,18 +1149,22 @@ functional_preproc: # this is a fork point using: [AFNI] Anatomical_Refined: + # Choose whether or not to dilate the anatomical mask if you choose 'Anatomical_Refined' as the functional masking option. It will dilate one voxel if enabled. anatomical_mask_dilation: Off generate_func_mean: + # Generate mean functional image run: Off normalize_func: + # Normalize functional image run: Off truncation: + # First timepoint to include in analysis. # Default is 0 (beginning of timeseries). # First timepoint selection in the scan parameters in the data configuration file, if present, will over-ride this selection. @@ -1117,6 +1178,7 @@ functional_preproc: stop_tr: scaling: + # Scale functional raw data, usually used in rodent pipeline run: Off @@ -1124,6 +1186,7 @@ functional_preproc: scaling_factor: 10 despiking: + # Run AFNI 3dDespike # this is a fork point # run: [On, Off] - this will run both and fork the pipeline @@ -1131,11 +1194,13 @@ functional_preproc: space: native coreg_prep: + # Generate sbref run: Off nuisance_corrections: 2-nuisance_regression: + # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -1147,6 +1212,7 @@ nuisance_corrections: # regression. regressor_masks: erode_anatomical_brain_mask: + # Erode brain mask in millimeters, default for brain mask is 30 mm # Brain erosion default is using millimeters. brain_mask_erosion_mm: @@ -1163,6 +1229,7 @@ nuisance_corrections: brain_erosion_mm: erode_csf: + # Erode cerebrospinal fluid mask in millimeters, default for cerebrospinal fluid is 30mm # Cerebrospinal fluid erosion default is using millimeters. csf_mask_erosion_mm: @@ -1179,6 +1246,7 @@ nuisance_corrections: csf_erosion_mm: erode_wm: + # Target volume ratio, if using erosion. # Default proportion is 0.6 for white matter mask. # If using erosion, using both proportion and millimeters is not recommended. @@ -1195,6 +1263,7 @@ nuisance_corrections: wm_erosion_mm: erode_gm: + # Target volume ratio, if using erosion. # If using erosion, using both proportion and millimeters is not recommended. 
gm_erosion_prop: @@ -1232,6 +1301,7 @@ nuisance_corrections: bandpass_filtering_order: After 1-ICA-AROMA: + # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] @@ -1244,6 +1314,7 @@ nuisance_corrections: timeseries_extraction: run: Off connectivity_matrix: + # Create a connectivity matrix from timeseries data # Options: # ['AFNI', 'Nilearn', 'ndmg'] @@ -1298,6 +1369,7 @@ timeseries_extraction: realignment: ROI_to_func amplitude_low_frequency_fluctuation: + # ALFF & f/ALFF # Calculate Amplitude of Low Frequency Fluctuations (ALFF) and fractional ALFF (f/ALFF) for all voxels. run: Off @@ -1312,6 +1384,7 @@ amplitude_low_frequency_fluctuation: lowpass_cutoff: [0.1] regional_homogeneity: + # ReHo # Calculate Regional Homogeneity (ReHo) for all voxels. run: Off @@ -1326,10 +1399,12 @@ regional_homogeneity: cluster_size: 27 voxel_mirrored_homotopic_connectivity: + # VMHC # Calculate Voxel-mirrored Homotopic Connectivity (VMHC) for all voxels. run: Off symmetric_registration: + # Included as part of the 'Image Resource Files' package available on the Install page of the User Guide. # It is not necessary to change this path unless you intend to use a non-standard symmetric template. T1w_brain_template_symmetric: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain_symmetric.nii.gz @@ -1360,6 +1435,7 @@ voxel_mirrored_homotopic_connectivity: dilated_symmetric_brain_mask_for_resample: $FSLDIR/data/standard/MNI152_T1_1mm_brain_mask_symmetric_dil.nii.gz network_centrality: + # Calculate Degree, Eigenvector Centrality, or Functional Connectivity Density. run: Off @@ -1370,6 +1446,7 @@ network_centrality: # Full path to a NIFTI file describing the mask. Centrality will be calculated for all voxels within the mask. template_specification_file: /cpac_templates/Mask_ABIDE_85Percent_GM.nii.gz degree_centrality: + # Enable/Disable degree centrality by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1388,6 +1465,7 @@ network_centrality: correlation_threshold: 0.001 eigenvector_centrality: + # Enable/Disable eigenvector centrality by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1406,6 +1484,7 @@ network_centrality: correlation_threshold: 0.001 local_functional_connectivity_density: + # Enable/Disable lFCD by selecting the connectivity weights # weight_options: ['Binarized', 'Weighted'] # disable this type of centrality with: @@ -1424,6 +1503,7 @@ network_centrality: correlation_threshold: 0.6 longitudinal_template_generation: + # If you have multiple T1w's, you can generate your own run-specific custom # T1w template to serve as an intermediate to the standard template for # anatomical registration. @@ -1489,6 +1569,7 @@ post_processing: output: [z-scored] seed_based_correlation_analysis: + # SCA - Seed-Based Correlation Analysis # For each extracted ROI Average time series, CPAC will generate a whole-brain correlation map. # It should be noted that for a given seed/ROI, SCA maps for ROI Average time series will be the same. @@ -1514,6 +1595,7 @@ seed_based_correlation_analysis: # PACKAGE INTEGRATIONS # -------------------- PyPEER: + # Training of eye-estimation models. Commonly used for movies data/naturalistic viewing. run: Off @@ -1532,6 +1614,7 @@ PyPEER: # This is a file describing the stimulus locations from the calibration sequence. 
stimulus_path: minimal_nuisance_correction: + # PyPEER Minimal nuisance regression # Note: PyPEER employs minimal preprocessing - these choices do not reflect what runs in the main pipeline. # PyPEER uses non-nuisance-regressed data from the main pipeline. From ff569b2880b2d398eafe51b6bc101437890b4bff Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 26 Aug 2024 16:39:10 -0400 Subject: [PATCH 110/507] fix duplicate commit --- CPAC/nuisance/utils/compcor.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index b8acf6a77e..7ccde5ed30 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -159,14 +159,6 @@ def voxel_generator(): input_img = nib.load(input_image_path) - def voxel_generator(): - for i in range(datashape[0]): - for j in range(datashape[1]): - for k in range(datashape[2]): - yield input_data[i, j, k, :] - - input_img = nb.load(input_image_path) - input_data = input_img.get_fdata() datashape = input_data.shape From cea857bab35f4bc885a8ed4bc224adef9a11ea62 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 26 Aug 2024 16:58:14 -0400 Subject: [PATCH 111/507] added link to the algorithm --- CPAC/nuisance/utils/compcor.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 7ccde5ed30..5adcfdae77 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,6 +91,9 @@ def cosine_filter( failure_mode="error", ): """ + `cosine_filter` adapted from Nipype. + https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 + Parameters: ----------- input_image_path : str From 8a303e7ad1ce67db77ef353db6f897daa4007ccb Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:34:54 -0400 Subject: [PATCH 112/507] Update CPAC/nuisance/utils/compcor.py Co-authored-by: Jon Clucas --- CPAC/nuisance/utils/compcor.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 5adcfdae77..9754c37059 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -134,7 +134,6 @@ def cosine_filter( # * Modified docstring to reflect local changes # * Updated style to match C-PAC codebase - # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers From 13308c67d39c6fa753fc2b165fe512bbbc7bc0c4 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 29 Aug 2024 16:31:12 -0400 Subject: [PATCH 113/507] corrected docs and implemented for different axis values --- CPAC/nuisance/utils/compcor.py | 56 +++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 25 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 9754c37059..1a2f9ce8eb 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,11 +91,14 @@ def cosine_filter( failure_mode="error", ): """ - `cosine_filter` adapted from Nipype. - https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 + The function applies a cosine filter to the input BOLD image using the discrete cosine transform (DCT) method. - Parameters: - ----------- + Adapted from nipype implementation. 
https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 + It removes the low-frequency drift from the voxel time series. The filtered image is saved to disk. + + + Parameters + ---------- input_image_path : str Path to the BOLD image to be filtered. timestep : float @@ -110,19 +113,11 @@ def cosine_filter( Specifies how to handle failure modes. If set to 'error', the function raises an error. If set to 'ignore', it returns the input data unchanged in case of failure. Default is 'error'. - Returns: - -------- + Returns + ------- cosfiltered_img : str Path to the filtered BOLD image. - Notes: - ------ - The function applies a cosine filter to the input BOLD image using the discrete cosine transform (DCT) method. - It removes the low-frequency drift from the voxel time series. The filtered image is saved to disk. - - Adapted from nipype implementation. - - The function uses a generator to iterate over voxel time series to optimize memory usage. """ # STATEMENT OF CHANGES: # This function is derived from sources licensed under the Apache-2.0 terms, @@ -133,6 +128,7 @@ def cosine_filter( # * Removed caluclation and return of `non_constant_regressors` # * Modified docstring to reflect local changes # * Updated style to match C-PAC codebase + # * Updated to use generator and iterate over voxel time series to optimize memory usage. # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -158,11 +154,11 @@ def voxel_generator(): for j in range(datashape[1]): for k in range(datashape[2]): yield input_data[i, j, k, :] - + + from nipype.algorithms.confounds import _cosine_drift, _full_rank + input_img = nib.load(input_image_path) - input_data = input_img.get_fdata() - datashape = input_data.shape timepoints = datashape[axis] if datashape[0] == 0 and failure_mode != "error": @@ -171,28 +167,38 @@ def voxel_generator(): frametimes = timestep * np.arange(timepoints) X = _full_rank(_cosine_drift(period_cut, frametimes))[0] - output_data = np.zeros(input_data.shape) + # Reshape the input data to bring the time dimension to the last axis if it's not already + if axis != -1: + reshaped_data = np.moveaxis(input_data, axis, -1) + else: + reshaped_data = input_data + + reshaped_output_data = np.zeros_like(reshaped_data) voxel_gen = voxel_generator() - for i in range(datashape[0]): - print(f"calculating {i+1} of {datashape[0]} row of voxels") - for j in range(datashape[1]): - for k in range(datashape[2]): + for i in range(reshaped_data.shape[0]): + print(f"calculating {i+1} of {reshaped_data.shape[0]} row of voxels") + for j in range(reshaped_data.shape[1]): + for k in range(reshaped_data.shape[2]): voxel_time_series = next(voxel_gen) - betas = np.linalg.lstsq(X, voxel_time_series.T)[0] + betas = np.linalg.lstsq(X, voxel_time_series.T, rcond=None)[0] if not remove_mean: X = X[:, :-1] betas = betas[:-1] residuals = voxel_time_series - X.dot(betas) + reshaped_output_data[i, j, k, :] = residuals - output_data[i, j, k, :] = residuals + # Move the time dimension back to its original position if it was reshaped + if axis != -1: + output_data = np.moveaxis(reshaped_output_data, -1, axis) + else: + output_data = reshaped_output_data hdr = input_img.header output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine) - file_name = input_image_path[input_image_path.rindex("/") + 1 :] cosfiltered_img = os.path.join(os.getcwd(), file_name) From be1c3f78b93096d79d5fd9c71aff0aca8057f4f0 Mon Sep 17 00:00:00 2001 From: 
"birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 14:53:31 -0400 Subject: [PATCH 114/507] moving remove_mean check out of the loop to avoid accidental overwriting of X --- CPAC/nuisance/utils/compcor.py | 128 ++++++++++++++++++--------------- 1 file changed, 69 insertions(+), 59 deletions(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 1a2f9ce8eb..2aeec2bdec 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -91,8 +91,8 @@ def cosine_filter( failure_mode="error", ): """ - The function applies a cosine filter to the input BOLD image using the discrete cosine transform (DCT) method. - + Apply cosine filter to the input BOLD image using the discrete cosine transform (DCT) method. + Adapted from nipype implementation. https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107 It removes the low-frequency drift from the voxel time series. The filtered image is saved to disk. @@ -148,64 +148,74 @@ def cosine_filter( # Prior to release 0.12, Nipype was licensed under a BSD license. # Modifications copyright (C) 2019 - 2024 C-PAC Developers + try: + + def voxel_generator(): + for i in range(datashape[0]): + for j in range(datashape[1]): + for k in range(datashape[2]): + yield input_data[i, j, k, :] + + from nipype.algorithms.confounds import _cosine_drift, _full_rank + + input_img = nib.load(input_image_path) + input_data = input_img.get_fdata() + datashape = input_data.shape + timepoints = datashape[axis] + if datashape[0] == 0 and failure_mode != "error": + return input_data, np.array([]) + + frametimes = timestep * np.arange(timepoints) + X_full = _full_rank(_cosine_drift(period_cut, frametimes))[0] + + # Generate X with and without the mean column + X_with_mean = X_full + X_without_mean = X_full[:, :-1] if X_full.shape[1] > 1 else X_full + + # Reshape the input data to bring the time dimension to the last axis if it's not already + if axis != -1: + reshaped_data = np.moveaxis(input_data, axis, -1) + else: + reshaped_data = input_data + + reshaped_output_data = np.zeros_like(reshaped_data) + + # Choose the appropriate X matrix + X = X_with_mean if remove_mean else X_without_mean + + voxel_gen = voxel_generator() + + for i in range(reshaped_data.shape[0]): + IFLOGGER.info( + f"calculating {i+1} of {reshaped_data.shape[0]} row of voxels" + ) + for j in range(reshaped_data.shape[1]): + for k in range(reshaped_data.shape[2]): + voxel_time_series = next(voxel_gen) + betas = np.linalg.lstsq(X, voxel_time_series.T, rcond=None)[0] + + residuals = voxel_time_series - X.dot(betas) + reshaped_output_data[i, j, k, :] = residuals + + # Move the time dimension back to its original position if it was reshaped + if axis != -1: + output_data = np.moveaxis(reshaped_output_data, -1, axis) + else: + output_data = reshaped_output_data + + hdr = input_img.header + output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine) + file_name = input_image_path[input_image_path.rindex("/") + 1 :] + + cosfiltered_img = os.path.join(os.getcwd(), file_name) + + output_img.to_filename(cosfiltered_img) + + return cosfiltered_img - def voxel_generator(): - for i in range(datashape[0]): - for j in range(datashape[1]): - for k in range(datashape[2]): - yield input_data[i, j, k, :] - - from nipype.algorithms.confounds import _cosine_drift, _full_rank - - input_img = nib.load(input_image_path) - input_data = input_img.get_fdata() - datashape = input_data.shape - timepoints = datashape[axis] - if 
datashape[0] == 0 and failure_mode != "error": - return input_data, np.array([]) - - frametimes = timestep * np.arange(timepoints) - X = _full_rank(_cosine_drift(period_cut, frametimes))[0] - - # Reshape the input data to bring the time dimension to the last axis if it's not already - if axis != -1: - reshaped_data = np.moveaxis(input_data, axis, -1) - else: - reshaped_data = input_data - - reshaped_output_data = np.zeros_like(reshaped_data) - - voxel_gen = voxel_generator() - - for i in range(reshaped_data.shape[0]): - print(f"calculating {i+1} of {reshaped_data.shape[0]} row of voxels") - for j in range(reshaped_data.shape[1]): - for k in range(reshaped_data.shape[2]): - voxel_time_series = next(voxel_gen) - betas = np.linalg.lstsq(X, voxel_time_series.T, rcond=None)[0] - - if not remove_mean: - X = X[:, :-1] - betas = betas[:-1] - - residuals = voxel_time_series - X.dot(betas) - reshaped_output_data[i, j, k, :] = residuals - - # Move the time dimension back to its original position if it was reshaped - if axis != -1: - output_data = np.moveaxis(reshaped_output_data, -1, axis) - else: - output_data = reshaped_output_data - - hdr = input_img.header - output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine) - file_name = input_image_path[input_image_path.rindex("/") + 1 :] - - cosfiltered_img = os.path.join(os.getcwd(), file_name) - - output_img.to_filename(cosfiltered_img) - - return cosfiltered_img + except Exception as e: + message = f"Error in cosine_filter: {e}" + IFLOGGER.error(message) def fallback_svd(a, full_matrices=True, compute_uv=True): From ad637f0fe5c7e944a9a44459c454118a0fb818bb Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 20 Sep 2024 15:32:59 -0400 Subject: [PATCH 115/507] correcting assignment of X as per selection of remove_mean --- CPAC/nuisance/utils/compcor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index 2aeec2bdec..8d17de23d1 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -181,7 +181,7 @@ def voxel_generator(): reshaped_output_data = np.zeros_like(reshaped_data) # Choose the appropriate X matrix - X = X_with_mean if remove_mean else X_without_mean + X = X_without_mean if remove_mean else X_with_mean voxel_gen = voxel_generator() From 3f1e1093d40726433d51cc7ca56a013fae5059f0 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:34:17 -0400 Subject: [PATCH 116/507] Update CHANGELOG.md Co-authored-by: Jon Clucas --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2850e0460c..01716d45d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,7 +21,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `pyproject.toml` file with `[build-system]` defined. - [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). - `desired_orientation` key in the blank config under `pipeline_setup`. -- Workflow (`wf`) parameter in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf` +- Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. 
+- Required positional parameter "orientation" to `resolve_resolution`. +- Optional positional argument "cfg" to `create_lesion_preproc`. ### Changed From d24fae671606d7a734e803822f87a107581473ef Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:34:53 -0400 Subject: [PATCH 117/507] Update CHANGELOG.md Co-authored-by: Jon Clucas --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01716d45d9..998749381e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,7 +20,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `pyproject.toml` file with `[build-system]` defined. - [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). -- `desired_orientation` key in the blank config under `pipeline_setup`. +- `desired_orientation` key in participant-level pipeline config under `pipeline_setup`. - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. From 467818f4561761ab71749bd5cead270320549283 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Sep 2024 12:56:25 -0400 Subject: [PATCH 118/507] Adding utility functions for validating TR in CPAC output bolds --- CPAC/pipeline/utils.py | 44 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 39acb6429f..5e1e0574b7 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -22,7 +22,49 @@ from CPAC.utils.bids_utils import insert_entity MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs - +import nibabel as nib +import os +import subprocess + +def find_pixel_dim4(file_path): + nii = nib.load(file_path) + header = nii.header + pixdim = header.get_zooms() + return pixdim[3] + +def update_pixel_dim4(file_path, new_pixdim4): + + if not os.path.isfile(file_path): + raise FileNotFoundError(f"File not found: {file_path}") + + # Print the current pixdim[4] value for verification + print(f'Updating {file_path} with new pixdim[4] value: {new_pixdim4}') + + # Construct the command to update the pixdim[4] value using 3drefit + command = ['3drefit', '-TR', str(new_pixdim4), file_path] + + # Execute the command + try: + subprocess.run(command, check=True) + print(f'Successfully updated TR to {new_pixdim4} seconds.') + except subprocess.CalledProcessError as e: + print(f'Error occurred while updating the file: {e}') + +def validate_outputs(input_bold, RawSource_bold): + """Match pixdim[4]/TR of the input_bold with RawSource_bold.""" + input_pixdim4 = find_pixel_dim4(input_bold) + source_pixdim4 = find_pixel_dim4(RawSource_bold) + + if input_pixdim4 != source_pixdim4: + print(f"TR mismatch detected between input_bold and RawSource_bold.") + print(f"input_bold TR: {input_pixdim4} seconds") + print(f"RawSource_bold TR: {source_pixdim4} seconds") + print(f"Attempting to update the TR of input_bold to match RawSource_bold.") + update_pixel_dim4(input_bold, source_pixdim4) + else: + print(f"TR match detected between input_bold and RawSource_bold.") + print(f"input_bold TR: {input_pixdim4} seconds") + 
print(f"RawSource_bold TR: {source_pixdim4} seconds") def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. From 21a2711ee8d0f11401acb93c50ffb97a79308da3 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Sep 2024 13:26:59 -0400 Subject: [PATCH 119/507] Adding validation node for TR of bold outputs --- CPAC/pipeline/engine.py | 13 ++++++++++++- CPAC/pipeline/utils.py | 13 ++++++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index d7f53f7029..085a5d17e6 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1357,7 +1357,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): except OSError as os_error: WFLOGGER.warning(os_error) continue - + write_json_imports = ["import os", "import json"] write_json = pe.Node( Function( @@ -1371,6 +1371,17 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): write_json.inputs.json_data = json_info wf.connect(id_string, "out_filename", write_json, "filename") + + # Node to validate TR (and other scan parameters) + validate_bold_header = pe.Node( + Function( + input_names=["input_resource", "RawSource"], + output_names=["output_resource"], + function=validate_bold_header, + ), + name=f"validate_bold_header_{resource_idx}_{pipe_x}", + ) + ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_{pipe_x}") ds.inputs.parameterization = False ds.inputs.base_directory = out_dct["out_dir"] diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 5e1e0574b7..461ad8f186 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -52,19 +52,22 @@ def update_pixel_dim4(file_path, new_pixdim4): def validate_outputs(input_bold, RawSource_bold): """Match pixdim[4]/TR of the input_bold with RawSource_bold.""" - input_pixdim4 = find_pixel_dim4(input_bold) + + output_bold = input_bold + output_pixdim4 = find_pixel_dim4(output_bold) source_pixdim4 = find_pixel_dim4(RawSource_bold) - if input_pixdim4 != source_pixdim4: + if output_pixdim4 != source_pixdim4: print(f"TR mismatch detected between input_bold and RawSource_bold.") - print(f"input_bold TR: {input_pixdim4} seconds") + print(f"input_bold TR: {output_pixdim4} seconds") print(f"RawSource_bold TR: {source_pixdim4} seconds") print(f"Attempting to update the TR of input_bold to match RawSource_bold.") - update_pixel_dim4(input_bold, source_pixdim4) + update_pixel_dim4(output_bold, source_pixdim4) else: print(f"TR match detected between input_bold and RawSource_bold.") - print(f"input_bold TR: {input_pixdim4} seconds") + print(f"input_bold TR: {output_pixdim4} seconds") print(f"RawSource_bold TR: {source_pixdim4} seconds") + return output_bold def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. 
From e82d9dab028a680c428dced55d896192ef00d0c2 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Sep 2024 13:41:32 -0400 Subject: [PATCH 120/507] minor name correction for node --- CPAC/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 085a5d17e6..b4a1e73208 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1375,8 +1375,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # Node to validate TR (and other scan parameters) validate_bold_header = pe.Node( Function( - input_names=["input_resource", "RawSource"], - output_names=["output_resource"], + input_names=["input_bold", "RawSource_bold"], + output_names=["output_bold"], function=validate_bold_header, ), name=f"validate_bold_header_{resource_idx}_{pipe_x}", From fd1617094b2752a4d586aae137d9b6c65826bade Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Sep 2024 15:41:39 -0400 Subject: [PATCH 121/507] Adding validation node before data sink --- CPAC/pipeline/engine.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index b4a1e73208..6e4fc8471c 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -35,7 +35,7 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set +from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set, validate_outputs from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename @@ -1377,7 +1377,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): Function( input_names=["input_bold", "RawSource_bold"], output_names=["output_bold"], - function=validate_bold_header, + function=validate_outputs, ), name=f"validate_bold_header_{resource_idx}_{pipe_x}", ) @@ -1405,7 +1405,16 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): subdir=out_dct["subdir"], ), ) - wf.connect(nii_name, "out_file", ds, f'{out_dct["subdir"]}.@data') + if resource.endswith("_bold"): + raw_source, raw_out = self.get_data("bold", pipe_idx=pipe_idx) + wf.connect([ + (node, validate_bold_header, [out, "input_bold"]), + (raw_source, validate_bold_header, [raw_out, "RawSource_bold"]), + (validate_bold_header, ds, ["output_resource", f'{out_dct["subdir"]}.@data']) + ]) + else: + wf.connect(nii_name, "out_file", ds, f'{out_dct["subdir"]}.@data') + wf.connect(write_json, "json_file", ds, f'{out_dct["subdir"]}.@json') outputs_logger.info(expected_outputs) From 545cd74329df6431abbd344fa6acbe6c337369ee Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Sep 2024 22:00:52 -0400 Subject: [PATCH 122/507] Adding doc-strings and exception handling to utility functions --- CPAC/pipeline/utils.py | 158 +++++++++++++++++++++++++++++++---------- 1 file changed, 121 insertions(+), 37 deletions(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 461ad8f186..f71658667f 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -17,57 +17,141 @@ """C-PAC pipeline engine utilities.""" from itertools import chain +import os +import 
subprocess + +import nibabel as nib from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity +from CPAC.utils.monitoring import WFLOGGER MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs -import nibabel as nib -import os -import subprocess -def find_pixel_dim4(file_path): - nii = nib.load(file_path) - header = nii.header - pixdim = header.get_zooms() - return pixdim[3] -def update_pixel_dim4(file_path, new_pixdim4): +def find_pixdim4(file_path): + """Find the pixdim4 value of a NIfTI file. + Parameters + ---------- + file_path : str + Path to the NIfTI file. + + Returns + ------- + float + The pixdim4 value of the NIfTI file. + + Raises + ------ + FileNotFoundError + If the file does not exist. + nibabel.filebasedimages.ImageFileError + If there is an error loading the NIfTI file. + IndexError + If pixdim4 is not found in the header. + """ if not os.path.isfile(file_path): - raise FileNotFoundError(f"File not found: {file_path}") - - # Print the current pixdim[4] value for verification - print(f'Updating {file_path} with new pixdim[4] value: {new_pixdim4}') - - # Construct the command to update the pixdim[4] value using 3drefit - command = ['3drefit', '-TR', str(new_pixdim4), file_path] - - # Execute the command + error_message = f"File not found: {file_path}" + raise FileNotFoundError(file_path) + + try: + nii = nib.load(file_path) + header = nii.header + pixdim = header.get_zooms() + return pixdim[3] + except nib.filebasedimages.ImageFileError as e: + error_message = f"Error loading the NIfTI file: {e}" + raise nib.filebasedimages.ImageFileError(error_message) + except IndexError as e: + error_message = f"pixdim4 not found in the header: {e}" + raise IndexError(error_message) + + +def update_pixdim4(file_path, new_pixdim4): + """Update the pixdim4 value of a NIfTI file using 3drefit. + + Parameters + ---------- + file_path : str + Path to the NIfTI file. + new_pixdim4 : float + New pixdim4 value to update the NIfTI file with. + + Raises + ------ + FileNotFoundError + If the file does not exist. + subprocess.CalledProcessError + If there is an error running the subprocess. + + Notes + ----- + The pixdim4 value is the Repetition Time (TR) of the NIfTI file. 
+ + """ + if not os.path.isfile(file_path): + error_message = f"File not found: {file_path}" + raise FileNotFoundError(error_message) + + # Print the current pixdim4 value for verification + WFLOGGER.info(f"Updating {file_path} with new pixdim[4] value: {new_pixdim4}") + + # Construct the command to update the pixdim4 value using 3drefit + command = ["3drefit", "-TR", str(new_pixdim4), file_path] + try: subprocess.run(command, check=True) - print(f'Successfully updated TR to {new_pixdim4} seconds.') + WFLOGGER.info(f"Successfully updated TR to {new_pixdim4} seconds.") except subprocess.CalledProcessError as e: - print(f'Error occurred while updating the file: {e}') + error_message = f"Error occurred while updating the file: {e}" + raise subprocess.CalledProcessError(error_message) + def validate_outputs(input_bold, RawSource_bold): - """Match pixdim[4]/TR of the input_bold with RawSource_bold.""" - - output_bold = input_bold - output_pixdim4 = find_pixel_dim4(output_bold) - source_pixdim4 = find_pixel_dim4(RawSource_bold) - - if output_pixdim4 != source_pixdim4: - print(f"TR mismatch detected between input_bold and RawSource_bold.") - print(f"input_bold TR: {output_pixdim4} seconds") - print(f"RawSource_bold TR: {source_pixdim4} seconds") - print(f"Attempting to update the TR of input_bold to match RawSource_bold.") - update_pixel_dim4(output_bold, source_pixdim4) - else: - print(f"TR match detected between input_bold and RawSource_bold.") - print(f"input_bold TR: {output_pixdim4} seconds") - print(f"RawSource_bold TR: {source_pixdim4} seconds") - return output_bold + """Match pixdim4/TR of the input_bold with RawSource_bold. + + Parameters + ---------- + input_bold : str + Path to the input BOLD file. + RawSource_bold : str + Path to the RawSource BOLD file. + + Returns + ------- + str + Path to the input BOLD file. + + Raises + ------ + Exception + If there is an error in finding or updating pixdim4. + """ + try: + output_bold = input_bold + output_pixdim4 = find_pixdim4(output_bold) + source_pixdim4 = find_pixdim4(RawSource_bold) + + if output_pixdim4 != source_pixdim4: + WFLOGGER.info( + "TR mismatch detected between output_bold and RawSource_bold." + ) + WFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") + WFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") + WFLOGGER.info( + "Attempting to update the TR of output_bold to match RawSource_bold." + ) + update_pixdim4(output_bold, source_pixdim4) + else: + WFLOGGER.info("TR match detected between output_bold and RawSource_bold.") + WFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") + WFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") + return output_bold + except Exception as e: + error_message = f"Error in validating outputs: {e}" + raise Exception(error_message) + def name_fork(resource_idx, cfg, json_info, out_dct): """Create and insert entities for forkpoints. 
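A note on the exception handling added in the patch above: `subprocess.CalledProcessError` is normally constructed as `CalledProcessError(returncode, cmd)`, so re-raising it with only a message string drops that signature. The sketch below is an assumed alternative (not what the patch does) that keeps the exit code and chains the original error:

```python
# Illustrative alternative to the re-raise above; not C-PAC code.
# Assumes AFNI's 3drefit is available on $PATH.
import subprocess


def refit_tr(file_path, new_pixdim4):
    """Update a NIfTI header's TR (pixdim[4]) via AFNI's 3drefit."""
    command = ["3drefit", "-TR", str(new_pixdim4), file_path]
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError as err:
        # Preserve the failing command's exit code and chain the cause.
        msg = f"3drefit failed (exit {err.returncode}) for {file_path}"
        raise RuntimeError(msg) from err
```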
From 23b13f1ebeff20778f20b048facbac0dc0f495d5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 13 Sep 2024 13:47:36 -0400 Subject: [PATCH 123/507] correcting the workflow connection --- CPAC/pipeline/engine.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 6e4fc8471c..67e1ad9ff2 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1406,12 +1406,18 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): ), ) if resource.endswith("_bold"): - raw_source, raw_out = self.get_data("bold", pipe_idx=pipe_idx) + raw_source, raw_out = self.get_data("bold") wf.connect([ - (node, validate_bold_header, [out, "input_bold"]), - (raw_source, validate_bold_header, [raw_out, "RawSource_bold"]), - (validate_bold_header, ds, ["output_resource", f'{out_dct["subdir"]}.@data']) - ]) + (node, validate_bold_header, [ + (out, "input_bold") + ]), + (raw_source, validate_bold_header, [ + (raw_out, "RawSource") + ]), + (validate_bold_header, ds, [ + ("output_bold", f'{out_dct["subdir"]}.@data') + ]) + ]) else: wf.connect(nii_name, "out_file", ds, f'{out_dct["subdir"]}.@data') From ea5282b1fa2c2a603337465993ff0b3551c3ab9b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 13 Sep 2024 13:48:14 -0400 Subject: [PATCH 124/507] correcting the workflow connection --- CPAC/pipeline/engine.py | 37 +++++++++++++++++++++++-------------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 67e1ad9ff2..07adfee7ac 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -35,7 +35,12 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set, validate_outputs +from CPAC.pipeline.utils import ( + MOVEMENT_FILTER_KEYS, + name_fork, + source_set, + validate_outputs, +) from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename @@ -1357,7 +1362,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): except OSError as os_error: WFLOGGER.warning(os_error) continue - + write_json_imports = ["import os", "import json"] write_json = pe.Node( Function( @@ -1371,7 +1376,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): write_json.inputs.json_data = json_info wf.connect(id_string, "out_filename", write_json, "filename") - + # Node to validate TR (and other scan parameters) validate_bold_header = pe.Node( Function( @@ -1407,17 +1412,21 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): ) if resource.endswith("_bold"): raw_source, raw_out = self.get_data("bold") - wf.connect([ - (node, validate_bold_header, [ - (out, "input_bold") - ]), - (raw_source, validate_bold_header, [ - (raw_out, "RawSource") - ]), - (validate_bold_header, ds, [ - ("output_bold", f'{out_dct["subdir"]}.@data') - ]) - ]) + wf.connect( + [ + (node, validate_bold_header, [(out, "input_bold")]), + ( + raw_source, + validate_bold_header, + [(raw_out, "RawSource")], + ), + ( + validate_bold_header, + ds, + [("output_bold", f'{out_dct["subdir"]}.@data')], + ), + ] + ) else: wf.connect(nii_name, "out_file", ds, 
f'{out_dct["subdir"]}.@data') From 0fe8c5815997d65d11d009c16d8bdafe126cbdc9 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 15:53:41 -0400 Subject: [PATCH 125/507] renaming input RawSource to correct value RawSource_bold --- CPAC/pipeline/engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 07adfee7ac..589009d9ba 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1418,7 +1418,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): ( raw_source, validate_bold_header, - [(raw_out, "RawSource")], + [(raw_out, "RawSource_bold")], ), ( validate_bold_header, From 4ac8e3a39c50a304e9329e129bf8deac1a54a8d1 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 21:34:12 -0400 Subject: [PATCH 126/507] changing validation input from renamed node nii_out --- CPAC/pipeline/engine.py | 3 ++- CPAC/pipeline/utils.py | 27 ++++++++++++++------------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 589009d9ba..38f4327dee 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1383,6 +1383,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): input_names=["input_bold", "RawSource_bold"], output_names=["output_bold"], function=validate_outputs, + imports=["from CPAC.pipeline.utils import find_pixdim4, update_pixdim4"], ), name=f"validate_bold_header_{resource_idx}_{pipe_x}", ) @@ -1414,7 +1415,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): raw_source, raw_out = self.get_data("bold") wf.connect( [ - (node, validate_bold_header, [(out, "input_bold")]), + (nii_name, validate_bold_header, [(out, "input_bold")]), ( raw_source, validate_bold_header, diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index f71658667f..dec85add74 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -24,7 +24,7 @@ from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity -from CPAC.utils.monitoring import WFLOGGER +from CPAC.utils.monitoring import IFLOGGER MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs @@ -95,14 +95,14 @@ def update_pixdim4(file_path, new_pixdim4): raise FileNotFoundError(error_message) # Print the current pixdim4 value for verification - WFLOGGER.info(f"Updating {file_path} with new pixdim[4] value: {new_pixdim4}") + IFLOGGER.info(f"Updating {file_path} with new pixdim[4] value: {new_pixdim4}") # Construct the command to update the pixdim4 value using 3drefit command = ["3drefit", "-TR", str(new_pixdim4), file_path] try: subprocess.run(command, check=True) - WFLOGGER.info(f"Successfully updated TR to {new_pixdim4} seconds.") + IFLOGGER.info(f"Successfully updated TR to {new_pixdim4} seconds.") except subprocess.CalledProcessError as e: error_message = f"Error occurred while updating the file: {e}" raise subprocess.CalledProcessError(error_message) @@ -120,8 +120,8 @@ def validate_outputs(input_bold, RawSource_bold): Returns ------- - str - Path to the input BOLD file. + output_bold : str + Path to the output BOLD file. Raises ------ @@ -134,23 +134,24 @@ def validate_outputs(input_bold, RawSource_bold): source_pixdim4 = find_pixdim4(RawSource_bold) if output_pixdim4 != source_pixdim4: - WFLOGGER.info( + IFLOGGER.info( "TR mismatch detected between output_bold and RawSource_bold." 
) - WFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") - WFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") - WFLOGGER.info( + IFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") + IFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") + IFLOGGER.info( "Attempting to update the TR of output_bold to match RawSource_bold." ) update_pixdim4(output_bold, source_pixdim4) else: - WFLOGGER.info("TR match detected between output_bold and RawSource_bold.") - WFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") - WFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") + IFLOGGER.info("TR match detected between output_bold and RawSource_bold.") + IFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") + IFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") return output_bold except Exception as e: error_message = f"Error in validating outputs: {e}" - raise Exception(error_message) + IFLOGGER.error(error_message) + return output_bold def name_fork(resource_idx, cfg, json_info, out_dct): From 8495759d9f5da72563ae6f1f5ac6f3401c8a057c Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 17 Sep 2024 21:34:27 -0400 Subject: [PATCH 127/507] changing validation input from renamed node nii_out --- CPAC/pipeline/engine.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 38f4327dee..02544064c1 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1383,7 +1383,9 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): input_names=["input_bold", "RawSource_bold"], output_names=["output_bold"], function=validate_outputs, - imports=["from CPAC.pipeline.utils import find_pixdim4, update_pixdim4"], + imports=[ + "from CPAC.pipeline.utils import find_pixdim4, update_pixdim4" + ], ), name=f"validate_bold_header_{resource_idx}_{pipe_x}", ) From 06fba22af7d44fa5398ec1c7e29ad605c61f9bcd Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 18 Sep 2024 15:44:25 -0400 Subject: [PATCH 128/507] added to changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb0f5a96b7..d8d7e75ad1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `pyproject.toml` file with `[build-system]` defined. - [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). +- validation node to match the pixdim4 of CPAC processed bold outputs with the original raw bold sources. 
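
(An illustration of the entry above: the validation node compares pixdim[4], the TR slot of the NIfTI header, between a processed output and its raw source, and re-stamps the raw TR with AFNI's `3drefit` on a mismatch. Below is a minimal standalone sketch, assuming nibabel is installed and `3drefit` is on `$PATH`; `match_tr` is a hypothetical condensation, not the C-PAC source in `CPAC/pipeline/utils.py`.)

import subprocess

import nibabel as nib


def find_pixdim4(nifti_path):
    """Read the TR stored in pixdim[4] of the NIfTI header."""
    # illustrative re-implementation of the helper the patches import
    return float(nib.load(nifti_path).header["pixdim"][4])


def match_tr(output_bold, raw_source_bold):
    """Hypothetical one-function condensation of ``validate_outputs``."""
    out_tr = find_pixdim4(output_bold)
    src_tr = find_pixdim4(raw_source_bold)
    if out_tr != src_tr:
        # re-stamp the raw TR onto the derived output in place
        subprocess.run(["3drefit", "-TR", str(src_tr), output_bold], check=True)
    return output_bold
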
### Changed From 07882156fbd53253b52f42d7dbc9fe21a84cdfe5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 18 Sep 2024 16:30:18 -0400 Subject: [PATCH 129/507] moving the node declaration inside the if block --- CPAC/pipeline/engine.py | 25 ++++++++++++------------- CPAC/pipeline/utils.py | 6 +++--- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 02544064c1..dfcdedde56 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1377,19 +1377,6 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): wf.connect(id_string, "out_filename", write_json, "filename") - # Node to validate TR (and other scan parameters) - validate_bold_header = pe.Node( - Function( - input_names=["input_bold", "RawSource_bold"], - output_names=["output_bold"], - function=validate_outputs, - imports=[ - "from CPAC.pipeline.utils import find_pixdim4, update_pixdim4" - ], - ), - name=f"validate_bold_header_{resource_idx}_{pipe_x}", - ) - ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_{pipe_x}") ds.inputs.parameterization = False ds.inputs.base_directory = out_dct["out_dir"] @@ -1414,6 +1401,18 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): ), ) if resource.endswith("_bold"): + # Node to validate TR (and other scan parameters) + validate_bold_header = pe.Node( + Function( + input_names=["input_bold", "RawSource_bold"], + output_names=["output_bold"], + function=validate_outputs, + imports=[ + "from CPAC.pipeline.utils import find_pixdim4, update_pixdim4" + ], + ), + name=f"validate_bold_header_{resource_idx}_{pipe_x}", + ) raw_source, raw_out = self.get_data("bold") wf.connect( [ diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index dec85add74..d42109da79 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -144,9 +144,9 @@ def validate_outputs(input_bold, RawSource_bold): ) update_pixdim4(output_bold, source_pixdim4) else: - IFLOGGER.info("TR match detected between output_bold and RawSource_bold.") - IFLOGGER.info(f"output_bold TR: {output_pixdim4} seconds") - IFLOGGER.info(f"RawSource_bold TR: {source_pixdim4} seconds") + IFLOGGER.debug("TR match detected between output_bold and RawSource_bold.") + IFLOGGER.debug(f"output_bold TR: {output_pixdim4} seconds") + IFLOGGER.debug(f"RawSource_bold TR: {source_pixdim4} seconds") return output_bold except Exception as e: error_message = f"Error in validating outputs: {e}" From 408d87120218fb1ccd054949b1b613f4177bf385 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:06:58 -0400 Subject: [PATCH 130/507] Update CHANGELOG.md Co-authored-by: Jon Cluce --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 998749381e..359a62fb37 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,7 +28,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. -- Fixed-orientation-parameter `RPI` in resolve_resolution `freesurfer_fs_brain_connector`, `anatomical_init_T1`, `lesion_preproc`, `anatomical_init_T2`, `func_reorient` to take in whatever is set in the config `desired_orientation` field. +- Made orientation configurable (was hard-coded as "RPI"). 
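
(On the entry above: `desired_orientation` holds an orientation code such as `RPI`. As a rough, hypothetical sketch of applying such a code outside the pipeline, one could wrap AFNI's `3dresample`, assuming AFNI is on `$PATH`; this `reorient` helper is illustrative, not C-PAC's implementation.)

import subprocess


def reorient(in_file, out_file, orientation="RPI"):
    """Re-grid an image into the requested orientation code, e.g. 'RPI'."""
    subprocess.run(
        ["3dresample", "-orient", orientation, "-prefix", out_file,
         "-inset", in_file],
        check=True,
    )
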
### Fixed

From 2895e8c4cda5f861c1719ca25e325691844e7655 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Thu, 10 Oct 2024 11:28:17 -0400
Subject: [PATCH 131/507] fixing pre-commit suggestions

---
 CPAC/resources/tests/test_templates.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py
index 341bde6c27..048cbe9b1c 100644
--- a/CPAC/resources/tests/test_templates.py
+++ b/CPAC/resources/tests/test_templates.py
@@ -19,19 +19,18 @@
 import os

 import pytest
+import nipype.pipeline.engine as pe

 from CPAC.pipeline import ALL_PIPELINE_CONFIGS
 from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool
 from CPAC.utils.configuration import Preconfiguration
 from CPAC.utils.datasource import get_highest_local_res
-import nipype.pipeline.engine as pe


 @pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS)
 def test_packaged_path_exists(pipeline):
     """
-    Check that all local templates are included in image at at
-    least one resolution.
+    Check that all local templates are included in image at at least one resolution.
    """
     wf = pe.Workflow(name="test")
     wf, rpool = ingress_pipeconfig_paths(

From 134544899be0b84e5f09c4fcf585683890a2c088 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 20 Sep 2024 20:38:10 -0400
Subject: [PATCH 132/507] :alembic: Test correlations with new SHA [run reg-suite]

From 27fde1095160dc6e78d066ca066a5c30e2c0e6fe Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 25 Oct 2024 14:36:38 -0400
Subject: [PATCH 133/507] :construction_worker: Streamline regtest CI flow [run reg-suite lite]

---
 .github/workflows/build_and_test.yml          | 20 ++++-----
 .github/workflows/on_push.yml                 | 29 ++++++++++++-
 .github/workflows/regression_test_full.yml    | 41 -------------------
 .../{regression_test_lite.yml => regtest.yml} | 14 +++++--
 4 files changed, 49 insertions(+), 55 deletions(-)
 delete mode 100644 .github/workflows/regression_test_full.yml
 rename .github/workflows/{regression_test_lite.yml => regtest.yml} (92%)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 6dadd8f9f9..342fb60db4 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -46,6 +46,10 @@ on:
         description: 'third phase of staging images to rebuild (base images)'
         type: string
         required: true
+      test_mode:
+        description: 'lite or full?'
+ type: string + default: None jobs: Ubuntu: @@ -327,19 +331,15 @@ jobs: if: github.ref_name == 'develop' || github.ref_name == 'main' uses: ./.github/workflows/smoke_test_participant.yml - regtest-lite: - name: Run lite regression test + regtest: + name: Run regression and integration test needs: - C-PAC secrets: inherit - if: contains(github.event.head_commit.message, '[run reg-suite]') - uses: ./.github/workflows/regression_test_lite.yml - - regtest-full: - name: Run full regression test - needs: - - smoke-tests-participant - uses: ./.github/workflows/regression_test_full.yml + if: inputs.test_mode == 'lite' + uses: ./.github/workflows/regtest.yml + with: + test_mode: inputs.test_mode Circle_tests: name: Run tests on CircleCI diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml index 60f6354dc5..a84359edd3 100644 --- a/.github/workflows/on_push.yml +++ b/.github/workflows/on_push.yml @@ -107,9 +107,35 @@ jobs: echo "phase_three=${phase_three}" >> $GITHUB_OUTPUT echo "rebuild_phase_three=${rebuild_phase_three}" >> $GITHUB_OUTPUT + check_pr: + runs-on: ubuntu-latest + outputs: + test_mode: ${{ steps.check_pr.outputs.test_mode }} + steps: + - name: Check out C-PAC + uses: actions/checkout@v3 + with: + fetch-depth: 2 + - name: Check if commit is in a PR to develop + id: check_pr + run: | + TEST_MODE=none + if echo "${{ github.event.head_commit.message }}" | grep -q '\[run reg-suite lite\]' + then + TEST_MODE=lite + elif gh pr list --base develop --json number,state,draft | jq 'any(.[]; .state == "OPEN" or .draft == true)'; then + TEST_MODE=lite + elif gh pr list --base main --json number,state,draft | jq 'any(.[]; .state == "OPEN" or .draft == true)'; then + TEST_MODE=full + fi + echo "test_mode=${TEST_MODE}" + echo "test_mode=${TEST_MODE}" >> $GITHUB_OUTPUT + build-stages: name: Build multistage image stages - needs: check-updated-preconfigs + needs: + - check_pr + - check-updated-preconfigs uses: ./.github/workflows/build_and_test.yml secrets: inherit with: @@ -119,3 +145,4 @@ jobs: rebuild_phase_two: ${{ needs.check-updated-preconfigs.outputs.rebuild_phase_two }} phase_three: ${{ needs.check-updated-preconfigs.outputs.phase_three }} rebuild_phase_three: ${{ needs.check-updated-preconfigs.outputs.rebuild_phase_three }} + test_mode: ${{ needs.check_pr.outputs.test_mode }} diff --git a/.github/workflows/regression_test_full.yml b/.github/workflows/regression_test_full.yml deleted file mode 100644 index 6dba2d1bf2..0000000000 --- a/.github/workflows/regression_test_full.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Run Regression Full Test - -on: - workflow_call: - -jobs: - test: - name: Regression Test - Full - runs-on: ubuntu-latest - steps: - - name: Get C-PAC branch - run: | - GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-) - if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]] - then - TAG=${GITHUB_BRANCH//\//_} - elif [[ $GITHUB_BRANCH == 'develop' ]] - then - TAG=nightly - elif [[ $GITHUB_BRANCH == 'main' ]] - then - TAG=latest - fi - - - name: Checkout Code - uses: actions/checkout@v2 - - name: Clone reg-suite - run: | - git clone https://github.com/amygutierrez/reg-suite.git - - - name: Run Full Regression Test - if: ${{ github.event_name }} == "pull_request" && ${{ github.event.pull_request.state }} == "closed" && ${{ github.event.pull_request.merged }} == "true" && ${{ github.event.pull_request.base.ref }} == "main" - run: | - echo "Running full regression test" - echo "୧(๑•̀ヮ•́)૭ LET'S GO! 
٩(^ᗜ^ )و " - - - uses: actions/upload-artifact@v3 - if: always() - with: - name: logs - path: output/*/*/log/ diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regtest.yml similarity index 92% rename from .github/workflows/regression_test_lite.yml rename to .github/workflows/regtest.yml index d55d8338ad..92a09a924c 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regtest.yml @@ -1,4 +1,4 @@ -name: Launch lite regression test +name: Launch regression test on: pull_request: @@ -9,6 +9,10 @@ on: - ready_for_review - reopened workflow_call: + inputs: + test_mode: + type: string + required: true secrets: GH_CLI_BIN_PATH: description: 'path to directory containing GitHub CLI binary if not on default $PATH' @@ -22,10 +26,14 @@ on: SSH_WORK_DIR: required: true workflow_dispatch: + inputs: + test_mode: + type: string + required: true jobs: test: - name: Regression Test - Lite + name: Regression Test - ${{ inputs.test_mode }} environment: ACCESS env: COMPARISON_PATH: ${{ secrets.COMPARISON_PATH }} @@ -70,7 +78,7 @@ jobs: chmod 600 ~/.ssh/id_rsa ssh-keyscan -H -t rsa "${{ env.SSH_HOST }}" > ~/.ssh/known_hosts - - name: Connect and Run Regression Test Lite + - name: Connect and Run Regression Test ${{ inputs.test_mode }} uses: appleboy/ssh-action@v1.0.0 with: host: ${{ env.SSH_HOST }} From f753ef4fe74cacb829ade505a0110e2a7d4fa3ef Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 28 Oct 2024 10:38:21 -0400 Subject: [PATCH 134/507] :loud_sound: :alembic: Echo test mode [run reg-suite lite] --- .github/workflows/build_and_test.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 342fb60db4..ec3a76932c 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -331,6 +331,12 @@ jobs: if: github.ref_name == 'develop' || github.ref_name == 'main' uses: ./.github/workflows/smoke_test_participant.yml + check_test_mode: + name: check_test_mode + runs-on: ubuntu-latest + steps: + - run: echo ${{ inputs.test_mode }} + regtest: name: Run regression and integration test needs: @@ -339,7 +345,7 @@ jobs: if: inputs.test_mode == 'lite' uses: ./.github/workflows/regtest.yml with: - test_mode: inputs.test_mode + test_mode: ${{ inputs.test_mode }} Circle_tests: name: Run tests on CircleCI From 5d6a9ca5008cc6cd43e411e965fc82042c0bb02b Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 23:03:43 -0500 Subject: [PATCH 135/507] :rotating_light: Lint in preparation for merge from --- .github/scripts/autoversioning.sh | 26 +- .pre-commit-config.yaml | 47 + .ruff.toml | 46 + CHANGELOG.md | 4 +- CPAC/anat_preproc/anat_preproc.py | 3179 ++++++----- CPAC/anat_preproc/lesion_preproc.py | 74 +- CPAC/func_preproc/func_preproc.py | 1914 ++++--- .../longitudinal_workflow.py | 1090 ++-- CPAC/pipeline/engine.py | 2354 ++++---- CPAC/pipeline/schema.py | 2070 +++---- CPAC/pipeline/test/test_engine.py | 136 +- CPAC/registration/registration.py | 4991 +++++++++-------- CPAC/registration/tests/mocks.py | 190 +- CPAC/registration/tests/test_registration.py | 196 +- .../configs/pipeline_config_blank.yml | 3 +- .../configs/pipeline_config_default.yml | 88 +- CPAC/resources/tests/test_templates.py | 31 +- CPAC/utils/datasource.py | 1440 ++--- CPAC/utils/test_mocks.py | 355 +- dev/docker_data/run.py | 1150 ++-- 20 files changed, 10581 insertions(+), 8803 deletions(-) create mode 100644 .ruff.toml diff --git 
a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh index 0543f626a1..f93dc3f57e 100755 --- a/.github/scripts/autoversioning.sh +++ b/.github/scripts/autoversioning.sh @@ -18,7 +18,14 @@ # License along with C-PAC. If not, see . # Update version comment strings -cd CPAC +function wait_for_git_lock() { + while [ -f "./.git/index.lock" ]; do + echo "Waiting for the git lock file to be removed..." + sleep 1 + done +} + +cd CPAC || exit 1 VERSION=$(python -c "from info import __version__; print(('.'.join(('.'.join(__version__[::-1].split('-')[1].split('.')[1:])[::-1], __version__.split('-')[1])) if '-' in __version__ else __version__).split('+', 1)[0])") cd .. echo "v${VERSION}" > version @@ -30,8 +37,8 @@ else # Linux and others find ./CPAC/resources/configs -name "*.yml" -exec sed -i'' -r "${_SED_COMMAND}" {} \; fi -git add version -VERSIONS=( `git show $(git log --pretty=format:'%h' -n 2 version | tail -n 1):version` `cat version` ) +wait_for_git_lock && git add version +VERSIONS=( `git show $(git log --pretty=format:'%h' -n 1 version | tail -n 1):version` `cat version` ) export PATTERN="(declare|typeset) -a" if [[ "$(declare -p VERSIONS)" =~ $PATTERN ]] then @@ -52,11 +59,12 @@ then done unset IFS fi -git add CPAC/resources/configs .github/Dockerfiles +wait_for_git_lock && git add CPAC/resources/configs .github/Dockerfiles # Overwrite top-level Dockerfiles with the CI Dockerfiles -cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile -cp .github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile variant-ABCD-HCP.Dockerfile -cp .github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile variant-fMRIPrep-LTS.Dockerfile -cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile -git add *Dockerfile \ No newline at end of file +wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile +wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile +for DOCKERFILE in $(ls *Dockerfile) +do + wait_for_git_lock && git add $DOCKERFILE +done diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7357d71417..66b0a5da0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,8 +14,55 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +ci: + skip: [ruff, update-yaml-comments] + +fail_fast: false repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.2 + hooks: + - id: ruff + args: [--fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-case-conflict + - id: end-of-file-fixer + exclude: '.*\.?sv|.*\.pkl(z)?' 
+ - id: mixed-line-ending + args: + - --fix=lf + - id: trailing-whitespace + exclude: '.*\.tsv' + - id: check-json + - id: pretty-format-json + args: + - --autofix + - --indent=4 + - --no-sort-keys + - id: check-merge-conflict + - id: check-yaml + - id: check-toml + + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.12.0 + hooks: + # pretty-format-yaml is disabled because it doesn't support the "YAML" directive https://yaml.org/spec/1.1/#id895631 + # - id: pretty-format-yaml + # args: + # - --autofix + # - --indent=2 + - id: pretty-format-toml + exclude: ^poetry.lock$ + args: + - --autofix + - --indent=2 + - --no-sort + - repo: local hooks: - id: autoversioning diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000000..265427a1ab --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,46 @@ +extend-exclude = ["dev/docker_data/get-pip_23.0.1.py"] +target-version = "py310" + +[format] +line-ending = "auto" # proposed setting to add next release: line-ending = "lf" + +[lint] +extend-select = ["A", "C4", "D", "EM", "F541", "G", "I", "ICN", "NPY", "PL", "RET", "RSE", "RUF", "Q", "T20", "UP032", "W"] # proposed rules to add next release cycle: ["B904", "LOG007", "TRY002", "TRY201", "TRY400", "TRY401"] +external = ["T20"] # Don't autoremove 'noqa` comments for these rules + +[lint.per-file-ignores] +"CPAC/func_preproc/func_preproc.py" = ["E402"] +"CPAC/utils/sklearn.py" = ["RUF003"] +"CPAC/utils/tests/old_functions.py" = ["C", "D", "E", "EM", "PLW", "RET"] +"CPAC/utils/utils.py" = ["T201"] # until `repickle` is removed +"setup.py" = ["D1"] + +[lint.flake8-import-conventions.extend-aliases] +"CPAC.pipeline.cpac_group_runner" = "cgr" +"nibabel" = "nib" +"nipype.interfaces.io" = "nio" +"networkx" = "nx" +"pkg_resources" = "p" +"CPAC.pipeline.nipype_pipeline_engine" = "pe" + +[lint.isort] +combine-as-imports = true +force-sort-within-sections = true +known-first-party = ["CPAC"] +no-lines-before = ["collab", "other-first-party", "local-folder"] +order-by-type = false +section-order = ["future", "standard-library", "third-party", "collab", "other-first-party", "first-party", "local-folder"] + +[lint.isort.sections] +"collab" = ["nibabel", "nilearn", "nipype", "PyBASC", "pybids", "scipy", "spython"] +"other-first-party" = ["flowdump", "indi_aws", "indi_schedulers", "PyPEER"] + +[lint.pydocstyle] +convention = "numpy" +ignore-decorators = ["CPAC.utils.docs.docstring_parameter"] + +[lint.pylint] +max-args = 10 +max-branches = 50 +max-returns = 12 +max-statements = 100 diff --git a/CHANGELOG.md b/CHANGELOG.md index 08878d2708..e8a23221ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,7 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added -- `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. +- `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. 
- Ability to throttle nodes, estimating all available memory when threading ### Changed @@ -236,7 +236,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template` - + and/or - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template` - Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv` diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index c903f54896..b37aebe003 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -17,408 +17,497 @@ # License along with C-PAC. If not, see . # from copy import deepcopy import os -from CPAC.pipeline.nodeblock import nodeblock -from nipype.interfaces import afni -from nipype.interfaces import ants -from nipype.interfaces import fsl -from nipype.interfaces import freesurfer -import nipype.interfaces.utility as util + +from nipype.interfaces import afni, ants, freesurfer, fsl from nipype.interfaces.fsl import utils as fsl_utils -from CPAC.pipeline import nipype_pipeline_engine as pe +import nipype.interfaces.utility as util + from CPAC.anat_preproc.ants import init_brain_extraction_wf -from CPAC.anat_preproc.utils import create_3dskullstrip_arg_string, \ - freesurfer_hemispheres, \ - fsl_aff_to_rigid, \ - mri_convert, \ - wb_command, \ - fslmaths_command, \ - VolumeRemoveIslands, \ - normalize_wmparc, \ - pad +from CPAC.anat_preproc.utils import ( + create_3dskullstrip_arg_string, + freesurfer_hemispheres, + fsl_aff_to_rigid, + fslmaths_command, + mri_convert, + normalize_wmparc, + pad, + VolumeRemoveIslands, + wb_command, +) +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock from CPAC.utils.interfaces.fsl import Merge as fslMerge -def acpc_alignment(config=None, acpc_target='whole-head', mask=False, - wf_name='acpc_align'): +def acpc_alignment( + config=None, acpc_target="whole-head", mask=False, wf_name="acpc_align" +): preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_leaf', - 'anat_brain', - 'brain_mask', - 'template_brain_only_for_anat', - 'template_brain_for_acpc', - 'template_head_for_acpc']), - name='inputspec') - - output_node = pe.Node(util.IdentityInterface(fields=['acpc_aligned_head', - 'acpc_brain_mask', - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm', - 'acpc_aligned_brain', - 'acpc_brain_mask']), - name='outputspec') - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'robustfov': - robust_fov = pe.Node(interface=fsl_utils.RobustFOV(), - name='anat_acpc_1_robustfov') - robust_fov.inputs.brainsize = config.anatomical_preproc['acpc_alignment']['brain_size'] - robust_fov.inputs.out_transform = 'fov_xfm.mat' - - fov, in_file = (robust_fov, 'in_file') - fov, fov_mtx = (robust_fov, 'out_transform') - fov, fov_outfile = (robust_fov, 'out_roi') - - elif config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': + inputnode = pe.Node( + util.IdentityInterface( + fields=[ + "anat_leaf", + "anat_brain", + 
"brain_mask", + "template_brain_only_for_anat", + "template_brain_for_acpc", + "template_head_for_acpc", + ] + ), + name="inputspec", + ) + + output_node = pe.Node( + util.IdentityInterface( + fields=[ + "acpc_aligned_head", + "acpc_brain_mask", + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + "acpc_aligned_brain", + "acpc_brain_mask", + ] + ), + name="outputspec", + ) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "robustfov": + robust_fov = pe.Node( + interface=fsl_utils.RobustFOV(), name="anat_acpc_1_robustfov" + ) + robust_fov.inputs.brainsize = config.anatomical_preproc["acpc_alignment"][ + "brain_size" + ] + robust_fov.inputs.out_transform = "fov_xfm.mat" + + fov, in_file = (robust_fov, "in_file") + fov, fov_mtx = (robust_fov, "out_transform") + fov, fov_outfile = (robust_fov, "out_roi") + + elif config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": # robustfov doesn't work on some monkey data. prefer using flirt. # ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 - flirt_fov = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_1_fov') - flirt_fov.inputs.args = '-applyxfm' + flirt_fov = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_1_fov") + flirt_fov.inputs.args = "-applyxfm" - fov, in_file = (flirt_fov, 'in_file') - fov, ref_file = (flirt_fov, 'reference') - fov, fov_mtx = (flirt_fov, 'out_matrix_file') - fov, fov_outfile = (flirt_fov, 'out_file') + fov, in_file = (flirt_fov, "in_file") + fov, ref_file = (flirt_fov, "reference") + fov, fov_mtx = (flirt_fov, "out_matrix_file") + fov, fov_outfile = (flirt_fov, "out_file") # align head-to-head to get acpc.mat (for human) - if acpc_target == 'whole-head': - preproc.connect(inputnode, 'anat_leaf', fov, in_file) - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': - preproc.connect(inputnode, 'anat_leaf', fov, ref_file) + if acpc_target == "whole-head": + preproc.connect(inputnode, "anat_leaf", fov, in_file) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": + preproc.connect(inputnode, "anat_leaf", fov, ref_file) # align brain-to-brain to get acpc.mat (for monkey) - if acpc_target == 'brain': - preproc.connect(inputnode, 'anat_brain', fov, in_file) - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': - preproc.connect(inputnode, 'anat_brain', fov, ref_file) + if acpc_target == "brain": + preproc.connect(inputnode, "anat_brain", fov, in_file) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": + preproc.connect(inputnode, "anat_brain", fov, ref_file) - convert_fov_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name='anat_acpc_2_fov_convertxfm') + convert_fov_xfm = pe.Node( + interface=fsl_utils.ConvertXFM(), name="anat_acpc_2_fov_convertxfm" + ) convert_fov_xfm.inputs.invert_xfm = True - preproc.connect(fov, fov_mtx, - convert_fov_xfm, 'in_file') + preproc.connect(fov, fov_mtx, convert_fov_xfm, "in_file") - align = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_3_flirt') - align.inputs.interp = 'spline' + align = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_3_flirt") + align.inputs.interp = "spline" align.inputs.searchr_x = [30, 30] align.inputs.searchr_y = [30, 30] align.inputs.searchr_z = [30, 30] - preproc.connect(fov, fov_outfile, align, 'in_file') + preproc.connect(fov, fov_outfile, align, "in_file") # align 
head-to-head to get acpc.mat (for human) - if acpc_target == 'whole-head': - preproc.connect(inputnode, 'template_head_for_acpc', align, - 'reference') + if acpc_target == "whole-head": + preproc.connect(inputnode, "template_head_for_acpc", align, "reference") # align brain-to-brain to get acpc.mat (for monkey) - if acpc_target == 'brain': - preproc.connect(inputnode, 'template_brain_for_acpc', align, - 'reference') + if acpc_target == "brain": + preproc.connect(inputnode, "template_brain_for_acpc", align, "reference") - concat_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name='anat_acpc_4_concatxfm') + concat_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), name="anat_acpc_4_concatxfm") concat_xfm.inputs.concat_xfm = True - preproc.connect(convert_fov_xfm, 'out_file', concat_xfm, 'in_file') - preproc.connect(align, 'out_matrix_file', concat_xfm, 'in_file2') + preproc.connect(convert_fov_xfm, "out_file", concat_xfm, "in_file") + preproc.connect(align, "out_matrix_file", concat_xfm, "in_file2") - aff_to_rig_imports = ['import os', 'from numpy import *'] - aff_to_rig = pe.Node(util.Function(input_names=['in_xfm', 'out_name'], - output_names=['out_mat'], - function=fsl_aff_to_rigid, - imports=aff_to_rig_imports), - name='anat_acpc_5_aff2rigid') - aff_to_rig.inputs.out_name = 'acpc.mat' + aff_to_rig_imports = ["import os", "from numpy import *"] + aff_to_rig = pe.Node( + util.Function( + input_names=["in_xfm", "out_name"], + output_names=["out_mat"], + function=fsl_aff_to_rigid, + imports=aff_to_rig_imports, + ), + name="anat_acpc_5_aff2rigid", + ) + aff_to_rig.inputs.out_name = "acpc.mat" - preproc.connect(concat_xfm, 'out_file', aff_to_rig, 'in_xfm') - preproc.connect(aff_to_rig, 'out_mat', output_node, 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + preproc.connect(concat_xfm, "out_file", aff_to_rig, "in_xfm") + preproc.connect( + aff_to_rig, + "out_mat", + output_node, + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ) - apply_xfm = pe.Node(interface=fsl.ApplyWarp(), - name='anat_acpc_6_applywarp') - apply_xfm.inputs.interp = 'spline' + apply_xfm = pe.Node(interface=fsl.ApplyWarp(), name="anat_acpc_6_applywarp") + apply_xfm.inputs.interp = "spline" apply_xfm.inputs.relwarp = True - preproc.connect(inputnode, 'anat_leaf', apply_xfm, 'in_file') - preproc.connect(inputnode, 'template_head_for_acpc', apply_xfm, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm, 'premat') - preproc.connect(apply_xfm, 'out_file', output_node, 'acpc_aligned_head') + preproc.connect(inputnode, "anat_leaf", apply_xfm, "in_file") + preproc.connect(inputnode, "template_head_for_acpc", apply_xfm, "ref_file") + preproc.connect(aff_to_rig, "out_mat", apply_xfm, "premat") + preproc.connect(apply_xfm, "out_file", output_node, "acpc_aligned_head") - if acpc_target == 'brain': - apply_xfm_brain = pe.Node(interface=fsl.ApplyWarp(), - name='anat_acpc_brain_6_applywarp') - apply_xfm_brain.inputs.interp = 'spline' + if acpc_target == "brain": + apply_xfm_brain = pe.Node( + interface=fsl.ApplyWarp(), name="anat_acpc_brain_6_applywarp" + ) + apply_xfm_brain.inputs.interp = "spline" apply_xfm_brain.inputs.relwarp = True - preproc.connect(inputnode, 'anat_brain', apply_xfm_brain, 'in_file') - preproc.connect(inputnode, 'template_brain_for_acpc', apply_xfm_brain, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm_brain, 'premat') - preproc.connect(apply_xfm_brain, 'out_file', output_node, 'acpc_aligned_brain') + preproc.connect(inputnode, "anat_brain", apply_xfm_brain, "in_file") + 
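
# An aside on the transform wired up above: ``fsl_aff_to_rigid`` fills the
# role of FSL's ``aff2rigid``, reducing FLIRT's 12-DOF affine to a 6-DOF
# rigid transform so ACPC alignment repositions the head without rescaling
# or shearing it. A rough numpy sketch of one way to make that reduction
# (a hypothetical helper, not the C-PAC implementation, which lives in
# CPAC/anat_preproc/utils.py):
def closest_rigid(affine):
    """Keep rotation and translation; drop scale and shear."""
    import numpy as np

    u, _, vt = np.linalg.svd(affine[:3, :3])  # orthogonal (polar) factor
    rigid = np.eye(4)
    rigid[:3, :3] = u @ vt  # nearest pure rotation to the linear part
    rigid[:3, 3] = affine[:3, 3]  # keep the translation unchanged
    return rigid
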
preproc.connect( + inputnode, "template_brain_for_acpc", apply_xfm_brain, "ref_file" + ) + preproc.connect(aff_to_rig, "out_mat", apply_xfm_brain, "premat") + preproc.connect(apply_xfm_brain, "out_file", output_node, "acpc_aligned_brain") if mask: - apply_xfm_mask = pe.Node(interface=fsl.ApplyWarp(), - name='anat_mask_acpc_7_applywarp') - apply_xfm_mask.inputs.interp = 'nn' + apply_xfm_mask = pe.Node( + interface=fsl.ApplyWarp(), name="anat_mask_acpc_7_applywarp" + ) + apply_xfm_mask.inputs.interp = "nn" apply_xfm_mask.inputs.relwarp = True - preproc.connect(inputnode, 'brain_mask', apply_xfm_mask, 'in_file') - preproc.connect(inputnode, 'template_brain_for_acpc', apply_xfm_mask, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm_mask, 'premat') - preproc.connect(apply_xfm_mask, 'out_file', output_node, - 'acpc_brain_mask') + preproc.connect(inputnode, "brain_mask", apply_xfm_mask, "in_file") + preproc.connect( + inputnode, "template_brain_for_acpc", apply_xfm_mask, "ref_file" + ) + preproc.connect(aff_to_rig, "out_mat", apply_xfm_mask, "premat") + preproc.connect(apply_xfm_mask, "out_file", output_node, "acpc_brain_mask") return preproc -def T2wToT1wReg(wf_name='T2w_to_T1w_reg'): - +def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): # Adapted from DCAN lab # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/T2wToT1wReg.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_brain', - 'T2w', - 'T2w_brain']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_brain", "T2w", "T2w_brain"]), + name="inputspec", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T2w_to_T1w']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["T2w_to_T1w"]), name="outputspec" + ) # ${FSLDIR}/bin/epi_reg --epi="$T2wImageBrain" --t1="$T1wImage" --t1brain="$WD"/"$T1wImageBrainFile" --out="$WD"/T2w2T1w - T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), - name='T2w2T1w_reg') - T2w2T1w_reg.inputs.out_base = 'T2w2T1w' + T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), name="T2w2T1w_reg") + T2w2T1w_reg.inputs.out_base = "T2w2T1w" - preproc.connect(inputnode, 'T2w_brain', T2w2T1w_reg ,'epi') - preproc.connect(inputnode, 'T1w', T2w2T1w_reg ,'t1_head') - preproc.connect(inputnode, 'T1w_brain', T2w2T1w_reg ,'t1_brain') + preproc.connect(inputnode, "T2w_brain", T2w2T1w_reg, "epi") + preproc.connect(inputnode, "T1w", T2w2T1w_reg, "t1_head") + preproc.connect(inputnode, "T1w_brain", T2w2T1w_reg, "t1_brain") # ${FSLDIR}/bin/applywarp --rel --interp=spline --in="$T2wImage" --ref="$T1wImage" --premat="$WD"/T2w2T1w.mat --out="$WD"/T2w2T1w - T2w2T1w = pe.Node(interface=fsl.ApplyWarp(), - name='T2w2T1w_applywarp') - T2w2T1w.inputs.interp = 'spline' + T2w2T1w = pe.Node(interface=fsl.ApplyWarp(), name="T2w2T1w_applywarp") + T2w2T1w.inputs.interp = "spline" T2w2T1w.inputs.relwarp = True - preproc.connect(inputnode, 'T2w', T2w2T1w, 'in_file') - preproc.connect(inputnode, 'T1w', T2w2T1w, 'ref_file') - preproc.connect(T2w2T1w_reg, 'epi2str_mat', T2w2T1w, 'premat') + preproc.connect(inputnode, "T2w", T2w2T1w, "in_file") + preproc.connect(inputnode, "T1w", T2w2T1w, "ref_file") + preproc.connect(T2w2T1w_reg, "epi2str_mat", T2w2T1w, "premat") # ${FSLDIR}/bin/fslmaths "$WD"/T2w2T1w -add 1 "$WD"/T2w2T1w -odt float - T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), - name='T2w2T1w_final') - T2w2T1w_final.inputs.op_string = "-add 1" + T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), 
name="T2w2T1w_final") + T2w2T1w_final.inputs.op_string = "-add 1" - preproc.connect(T2w2T1w, 'out_file', T2w2T1w_final, 'in_file') - preproc.connect(T2w2T1w_final, 'out_file', outputnode, 'T2w_to_T1w') + preproc.connect(T2w2T1w, "out_file", T2w2T1w_final, "in_file") + preproc.connect(T2w2T1w_final, "out_file", outputnode, "T2w_to_T1w") return preproc -def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name='biasfield_correction_t1t2'): - +def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name="biasfield_correction_t1t2"): # Adapted from DCAN lab # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_brain', - 'T2w']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_brain", "T2w"]), name="inputspec" + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T1w_biascorrected', - 'T1w_brain_biascorrected', - 'T2w_biascorrected', - 'T2w_brain_biascorrected', - 'biasfield']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=[ + "T1w_biascorrected", + "T1w_brain_biascorrected", + "T2w_biascorrected", + "T2w_brain_biascorrected", + "biasfield", + ] + ), + name="outputspec", + ) # 1. Form sqrt(T1w*T2w), mask this and normalise by the mean # ${FSLDIR}/bin/fslmaths $T1wImage -mul $T2wImage -abs -sqrt $WD/T1wmulT2w.nii.gz -odt float - T1wmulT2w = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w') + T1wmulT2w = pe.Node(interface=fsl.MultiImageMaths(), name="T1wmulT2w") T1wmulT2w.inputs.op_string = "-mul %s -abs -sqrt" - - preproc.connect(inputnode, 'T1w', T1wmulT2w, 'in_file') - preproc.connect(inputnode, 'T2w', T1wmulT2w, 'operand_files') + + preproc.connect(inputnode, "T1w", T1wmulT2w, "in_file") + preproc.connect(inputnode, "T2w", T1wmulT2w, "operand_files") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w.nii.gz -mas $T1wImageBrain $WD/T1wmulT2w_brain.nii.gz - T1wmulT2w_brain = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w_brain') + T1wmulT2w_brain = pe.Node(interface=fsl.MultiImageMaths(), name="T1wmulT2w_brain") T1wmulT2w_brain.inputs.op_string = "-mas %s " - preproc.connect(T1wmulT2w, 'out_file', T1wmulT2w_brain, 'in_file') - preproc.connect(inputnode, 'T1w_brain', T1wmulT2w_brain, 'operand_files') + preproc.connect(T1wmulT2w, "out_file", T1wmulT2w_brain, "in_file") + preproc.connect(inputnode, "T1w_brain", T1wmulT2w_brain, "operand_files") # meanbrainval=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain.nii.gz -M` - meanbrainval = pe.Node(interface=fsl.ImageStats(), - name='image_stats', - iterfield=['in_file']) - meanbrainval.inputs.op_string = '-M' + meanbrainval = pe.Node( + interface=fsl.ImageStats(), name="image_stats", iterfield=["in_file"] + ) + meanbrainval.inputs.op_string = "-M" - preproc.connect(T1wmulT2w_brain, 'out_file', meanbrainval, 'in_file') + preproc.connect(T1wmulT2w_brain, "out_file", meanbrainval, "in_file") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain.nii.gz -div $meanbrainval $WD/T1wmulT2w_brain_norm.nii.gz - T1wmulT2w_brain_norm = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm') - - def form_meanbrainval_string(meanbrainval): - return '-div %f' % (meanbrainval) + T1wmulT2w_brain_norm = pe.Node( + interface=fsl.ImageMaths(), name="T1wmulT2w_brain_norm" + ) - preproc.connect(T1wmulT2w_brain, 'out_file', T1wmulT2w_brain_norm, 'in_file') - preproc.connect(meanbrainval, ('out_stat', form_meanbrainval_string), 
- T1wmulT2w_brain_norm, 'op_string') + def form_meanbrainval_string(meanbrainval): + return "-div %f" % (meanbrainval) + + preproc.connect(T1wmulT2w_brain, "out_file", T1wmulT2w_brain_norm, "in_file") + preproc.connect( + meanbrainval, + ("out_stat", form_meanbrainval_string), + T1wmulT2w_brain_norm, + "op_string", + ) # 2. Smooth the normalised sqrt image, using within-mask smoothing : s(Mask*X)/s(Mask) # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -bin -s $BiasFieldSmoothingSigma $WD/SmoothNorm_s${BiasFieldSmoothingSigma}.nii.gz - SmoothNorm = pe.Node(interface=fsl.ImageMaths(), - name='SmoothNorm') - SmoothNorm.inputs.op_string = "-bin -s %f" % (config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma']) + SmoothNorm = pe.Node(interface=fsl.ImageMaths(), name="SmoothNorm") + SmoothNorm.inputs.op_string = ( + "-bin -s %f" + % ( + config.anatomical_preproc["t1t2_bias_field_correction"][ + "BiasFieldSmoothingSigma" + ] + ) + ) - preproc.connect(T1wmulT2w_brain_norm, 'out_file', SmoothNorm, 'in_file') + preproc.connect(T1wmulT2w_brain_norm, "out_file", SmoothNorm, "in_file") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -s $BiasFieldSmoothingSigma -div $WD/SmoothNorm_s${BiasFieldSmoothingSigma}.nii.gz $WD/T1wmulT2w_brain_norm_s${BiasFieldSmoothingSigma}.nii.gz def T1wmulT2w_brain_norm_s_string(sigma, in_file): - return "-s %f -div %s" %(sigma, in_file) - - T1wmulT2w_brain_norm_s_string = pe.Node(util.Function(input_names=['sigma', 'in_file'], - output_names=['out_str'], - function=T1wmulT2w_brain_norm_s_string), - name='T1wmulT2w_brain_norm_s_string') - T1wmulT2w_brain_norm_s_string.inputs.sigma = config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma'] - - preproc.connect(SmoothNorm, 'out_file', T1wmulT2w_brain_norm_s_string, 'in_file') - - T1wmulT2w_brain_norm_s = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm_s') - - preproc.connect(T1wmulT2w_brain_norm, 'out_file', T1wmulT2w_brain_norm_s, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_s_string, 'out_str', T1wmulT2w_brain_norm_s, 'op_string') + return "-s %f -div %s" % (sigma, in_file) + + T1wmulT2w_brain_norm_s_string = pe.Node( + util.Function( + input_names=["sigma", "in_file"], + output_names=["out_str"], + function=T1wmulT2w_brain_norm_s_string, + ), + name="T1wmulT2w_brain_norm_s_string", + ) + T1wmulT2w_brain_norm_s_string.inputs.sigma = config.anatomical_preproc[ + "t1t2_bias_field_correction" + ]["BiasFieldSmoothingSigma"] + + preproc.connect(SmoothNorm, "out_file", T1wmulT2w_brain_norm_s_string, "in_file") + + T1wmulT2w_brain_norm_s = pe.Node( + interface=fsl.ImageMaths(), name="T1wmulT2w_brain_norm_s" + ) + + preproc.connect(T1wmulT2w_brain_norm, "out_file", T1wmulT2w_brain_norm_s, "in_file") + preproc.connect( + T1wmulT2w_brain_norm_s_string, "out_str", T1wmulT2w_brain_norm_s, "op_string" + ) # 3. 
Divide normalised sqrt image by smoothed version (to do simple bias correction) # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -div $WD/T1wmulT2w_brain_norm_s$BiasFieldSmoothingSigma.nii.gz $WD/T1wmulT2w_brain_norm_modulate.nii.gz - T1wmulT2w_brain_norm_modulate = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w_brain_norm_modulate') - T1wmulT2w_brain_norm_modulate.inputs.op_string = "-div %s" + T1wmulT2w_brain_norm_modulate = pe.Node( + interface=fsl.MultiImageMaths(), name="T1wmulT2w_brain_norm_modulate" + ) + T1wmulT2w_brain_norm_modulate.inputs.op_string = "-div %s" - preproc.connect(T1wmulT2w_brain_norm, 'out_file', T1wmulT2w_brain_norm_modulate, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_s, 'out_file', T1wmulT2w_brain_norm_modulate, 'operand_files') + preproc.connect( + T1wmulT2w_brain_norm, "out_file", T1wmulT2w_brain_norm_modulate, "in_file" + ) + preproc.connect( + T1wmulT2w_brain_norm_s, + "out_file", + T1wmulT2w_brain_norm_modulate, + "operand_files", + ) # 4. Create a mask using a threshold at Mean - 0.5*Stddev, with filling of holes to remove any non-grey/white tissue. # STD=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain_norm_modulate.nii.gz -S` - STD = pe.Node(interface=fsl.ImageStats(), - name='STD', - iterfield=['in_file']) - STD.inputs.op_string = '-S' + STD = pe.Node(interface=fsl.ImageStats(), name="STD", iterfield=["in_file"]) + STD.inputs.op_string = "-S" - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', STD, 'in_file') + preproc.connect(T1wmulT2w_brain_norm_modulate, "out_file", STD, "in_file") # MEAN=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain_norm_modulate.nii.gz -M` - MEAN = pe.Node(interface=fsl.ImageStats(), - name='MEAN', - iterfield=['in_file']) - MEAN.inputs.op_string = '-M' + MEAN = pe.Node(interface=fsl.ImageStats(), name="MEAN", iterfield=["in_file"]) + MEAN.inputs.op_string = "-M" + + preproc.connect(T1wmulT2w_brain_norm_modulate, "out_file", MEAN, "in_file") - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', MEAN, 'in_file') - # Lower=`echo "$MEAN - ($STD * $Factor)" | bc -l` def form_lower_string(mean, std): - Factor = 0.5 #Leave this at 0.5 for now it is the number of standard deviations below the mean to threshold the non-brain tissues at - lower = str(float(mean)-(float(std)*float(Factor))) - return '-thr %s -bin -ero -mul 255' % (lower) - - form_lower_string = pe.Node(util.Function(input_names=['mean', 'std'], - output_names=['out_str'], - function=form_lower_string), - name='form_lower_string') + Factor = 0.5 # Leave this at 0.5 for now it is the number of standard deviations below the mean to threshold the non-brain tissues at + lower = str(float(mean) - (float(std) * float(Factor))) + return "-thr %s -bin -ero -mul 255" % (lower) + + form_lower_string = pe.Node( + util.Function( + input_names=["mean", "std"], + output_names=["out_str"], + function=form_lower_string, + ), + name="form_lower_string", + ) - preproc.connect(MEAN, 'out_stat', form_lower_string, 'mean') - preproc.connect(STD, 'out_stat', form_lower_string, 'std') + preproc.connect(MEAN, "out_stat", form_lower_string, "mean") + preproc.connect(STD, "out_stat", form_lower_string, "std") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm_modulate -thr $Lower -bin -ero -mul 255 $WD/T1wmulT2w_brain_norm_modulate_mask - T1wmulT2w_brain_norm_modulate_mask = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm_modulate_mask') + T1wmulT2w_brain_norm_modulate_mask = pe.Node( + interface=fsl.ImageMaths(), 
name="T1wmulT2w_brain_norm_modulate_mask" + ) - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', T1wmulT2w_brain_norm_modulate_mask, 'in_file') - preproc.connect(form_lower_string, 'out_str', T1wmulT2w_brain_norm_modulate_mask, 'op_string') + preproc.connect( + T1wmulT2w_brain_norm_modulate, + "out_file", + T1wmulT2w_brain_norm_modulate_mask, + "in_file", + ) + preproc.connect( + form_lower_string, "out_str", T1wmulT2w_brain_norm_modulate_mask, "op_string" + ) # ${CARET7DIR}/wb_command -volume-remove-islands $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz - T1wmulT2w_brain_norm_modulate_mask_roi = pe.Node(interface=VolumeRemoveIslands(), - name='remove_islands') + T1wmulT2w_brain_norm_modulate_mask_roi = pe.Node( + interface=VolumeRemoveIslands(), name="remove_islands" + ) - preproc.connect(T1wmulT2w_brain_norm_modulate_mask, 'out_file', T1wmulT2w_brain_norm_modulate_mask_roi, 'in_file') + preproc.connect( + T1wmulT2w_brain_norm_modulate_mask, + "out_file", + T1wmulT2w_brain_norm_modulate_mask_roi, + "in_file", + ) # 5. Extrapolate normalised sqrt image from mask region out to whole FOV # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -mas $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz -dilall $WD/bias_raw.nii.gz -odt float - bias_raw = pe.Node(interface=fsl.MultiImageMaths(), - name='bias_raw') + bias_raw = pe.Node(interface=fsl.MultiImageMaths(), name="bias_raw") bias_raw.inputs.op_string = "-mas %s -dilall " - preproc.connect(T1wmulT2w_brain_norm, 'out_file', bias_raw, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_modulate_mask_roi, 'out_file', bias_raw, 'operand_files') + preproc.connect(T1wmulT2w_brain_norm, "out_file", bias_raw, "in_file") + preproc.connect( + T1wmulT2w_brain_norm_modulate_mask_roi, "out_file", bias_raw, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $WD/bias_raw.nii.gz -s $BiasFieldSmoothingSigma $OutputBiasField - OutputBiasField = pe.Node(interface=fsl.ImageMaths(), - name='OutputBiasField') - OutputBiasField.inputs.op_string = "-s %f " % (config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma']) + OutputBiasField = pe.Node(interface=fsl.ImageMaths(), name="OutputBiasField") + OutputBiasField.inputs.op_string = ( + "-s %f " + % ( + config.anatomical_preproc["t1t2_bias_field_correction"][ + "BiasFieldSmoothingSigma" + ] + ) + ) - preproc.connect(bias_raw, 'out_file', OutputBiasField, 'in_file') + preproc.connect(bias_raw, "out_file", OutputBiasField, "in_file") # 6. 
Use bias field output to create corrected images def file_to_a_list(infile_1, infile_2): - return list([infile_1,infile_2]) - - file_to_a_list = pe.Node(util.Function(input_names=['infile_1', 'infile_2'], - output_names=['out_list'], - function=file_to_a_list), - name='file_to_a_list') + return list([infile_1, infile_2]) + + file_to_a_list = pe.Node( + util.Function( + input_names=["infile_1", "infile_2"], + output_names=["out_list"], + function=file_to_a_list, + ), + name="file_to_a_list", + ) - preproc.connect(OutputBiasField, 'out_file', file_to_a_list, 'infile_1') - preproc.connect(inputnode, 'T1w_brain', file_to_a_list, 'infile_2') + preproc.connect(OutputBiasField, "out_file", file_to_a_list, "infile_1") + preproc.connect(inputnode, "T1w_brain", file_to_a_list, "infile_2") # ${FSLDIR}/bin/fslmaths $T1wImage -div $OutputBiasField -mas $T1wImageBrain $OutputT1wRestoredBrainImage -odt float - OutputT1wRestoredBrainImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT1wRestoredBrainImage') - OutputT1wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + OutputT1wRestoredBrainImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT1wRestoredBrainImage" + ) + OutputT1wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + + preproc.connect(inputnode, "T1w", OutputT1wRestoredBrainImage, "in_file") + preproc.connect( + file_to_a_list, "out_list", OutputT1wRestoredBrainImage, "operand_files" + ) - preproc.connect(inputnode, 'T1w', OutputT1wRestoredBrainImage, 'in_file') - preproc.connect(file_to_a_list,'out_list',OutputT1wRestoredBrainImage, 'operand_files') - # ${FSLDIR}/bin/fslmaths $T1wImage -div $OutputBiasField $OutputT1wRestoredImage -odt float - OutputT1wRestoredImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT1wRestoredImage') + OutputT1wRestoredImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT1wRestoredImage" + ) OutputT1wRestoredImage.inputs.op_string = "-div %s " - preproc.connect(inputnode, 'T1w', OutputT1wRestoredImage, 'in_file') - preproc.connect(OutputBiasField, 'out_file', OutputT1wRestoredImage, 'operand_files') + preproc.connect(inputnode, "T1w", OutputT1wRestoredImage, "in_file") + preproc.connect( + OutputBiasField, "out_file", OutputT1wRestoredImage, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $T2wImage -div $OutputBiasField -mas $T1wImageBrain $OutputT2wRestoredBrainImage -odt float - OutputT2wRestoredBrainImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT2wRestoredBrainImage') - OutputT2wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " - - preproc.connect(inputnode, 'T2w', OutputT2wRestoredBrainImage, 'in_file') - preproc.connect(file_to_a_list,'out_list',OutputT2wRestoredBrainImage, 'operand_files') + OutputT2wRestoredBrainImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT2wRestoredBrainImage" + ) + OutputT2wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + + preproc.connect(inputnode, "T2w", OutputT2wRestoredBrainImage, "in_file") + preproc.connect( + file_to_a_list, "out_list", OutputT2wRestoredBrainImage, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $T2wImage -div $OutputBiasField $OutputT2wRestoredImage -odt float - OutputT2wRestoredImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT2wRestoredImage') + OutputT2wRestoredImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT2wRestoredImage" + ) OutputT2wRestoredImage.inputs.op_string = "-div %s " - preproc.connect(inputnode, 'T2w', OutputT2wRestoredImage, 'in_file') - 
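
# An aside on the method this workflow implements: the receive-coil bias
# field multiplies T1w and T2w roughly equally, so sqrt(T1w * T2w) isolates
# it; normalising by the mean intensity inside the brain and smoothing gives
# the field that both images are divided by. A voxelwise numpy sketch under
# that assumption (hypothetical, not the FSL-node implementation above):
def sqrt_bias_field(t1w, t2w, smooth):
    """``smooth`` is any Gaussian smoother, e.g. scipy.ndimage.gaussian_filter."""
    import numpy as np

    field = np.sqrt(np.abs(t1w * t2w))  # shared multiplicative bias
    field = field / field.mean()  # normalise by mean (in-brain) intensity
    return smooth(field)  # bias-corrected images: t1w / field, t2w / field
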
preproc.connect(OutputBiasField, 'out_file', OutputT2wRestoredImage, 'operand_files') + preproc.connect(inputnode, "T2w", OutputT2wRestoredImage, "in_file") + preproc.connect( + OutputBiasField, "out_file", OutputT2wRestoredImage, "operand_files" + ) - preproc.connect(OutputT1wRestoredImage, 'out_file', outputnode, 'T1w_biascorrected') - preproc.connect(OutputT1wRestoredBrainImage, 'out_file', outputnode, 'T1w_brain_biascorrected') - preproc.connect(OutputT2wRestoredImage, 'out_file', outputnode, 'T2w_biascorrected') - preproc.connect(OutputT2wRestoredBrainImage, 'out_file', outputnode, 'T2w_brain_biascorrected') - preproc.connect(OutputBiasField, 'out_file', outputnode, 'biasfield') + preproc.connect(OutputT1wRestoredImage, "out_file", outputnode, "T1w_biascorrected") + preproc.connect( + OutputT1wRestoredBrainImage, "out_file", outputnode, "T1w_brain_biascorrected" + ) + preproc.connect(OutputT2wRestoredImage, "out_file", outputnode, "T2w_biascorrected") + preproc.connect( + OutputT2wRestoredBrainImage, "out_file", outputnode, "T2w_brain_biascorrected" + ) + preproc.connect(OutputBiasField, "out_file", outputnode, "biasfield") return preproc @@ -426,284 +515,330 @@ def file_to_a_list(infile_1, infile_2): def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Skull-stripping using AFNI 3dSkullStrip inputnode_afni = pe.Node( - util.IdentityInterface(fields=['mask_vol', - 'shrink_factor', - 'var_shrink_fac', - 'shrink_fac_bot_lim', - 'avoid_vent', - 'niter', - 'pushout', - 'touchup', - 'fill_hole', - 'NN_smooth', - 'smooth_final', - 'avoid_eyes', - 'use_edge', - 'exp_frac', - 'push_to_edge', - 'use_skull', - 'perc_int', - 'max_inter_iter', - 'blur_fwhm', - 'fac', - 'monkey']), - name=f'AFNI_options_{pipe_num}') - - skullstrip_args = pe.Node(util.Function(input_names=['spat_norm', - 'spat_norm_dxyz', - 'mask_vol', - 'shrink_fac', - 'var_shrink_fac', - 'shrink_fac_bot_lim', - 'avoid_vent', - 'niter', - 'pushout', - 'touchup', - 'fill_hole', - 'NN_smooth', - 'smooth_final', - 'avoid_eyes', - 'use_edge', - 'exp_frac', - 'push_to_edge', - 'use_skull', - 'perc_int', - 'max_inter_iter', - 'blur_fwhm', - 'fac', - 'monkey'], - output_names=['expr'], - function=create_3dskullstrip_arg_string), - name=f'anat_skullstrip_args_{pipe_num}') + util.IdentityInterface( + fields=[ + "mask_vol", + "shrink_factor", + "var_shrink_fac", + "shrink_fac_bot_lim", + "avoid_vent", + "niter", + "pushout", + "touchup", + "fill_hole", + "NN_smooth", + "smooth_final", + "avoid_eyes", + "use_edge", + "exp_frac", + "push_to_edge", + "use_skull", + "perc_int", + "max_inter_iter", + "blur_fwhm", + "fac", + "monkey", + ] + ), + name=f"AFNI_options_{pipe_num}", + ) + + skullstrip_args = pe.Node( + util.Function( + input_names=[ + "spat_norm", + "spat_norm_dxyz", + "mask_vol", + "shrink_fac", + "var_shrink_fac", + "shrink_fac_bot_lim", + "avoid_vent", + "niter", + "pushout", + "touchup", + "fill_hole", + "NN_smooth", + "smooth_final", + "avoid_eyes", + "use_edge", + "exp_frac", + "push_to_edge", + "use_skull", + "perc_int", + "max_inter_iter", + "blur_fwhm", + "fac", + "monkey", + ], + output_names=["expr"], + function=create_3dskullstrip_arg_string, + ), + name=f"anat_skullstrip_args_{pipe_num}", + ) inputnode_afni.inputs.set( - mask_vol=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['mask_vol'], - shrink_factor= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['shrink_factor'], - var_shrink_fac= - cfg.anatomical_preproc['brain_extraction'][ - 
'AFNI-3dSkullStrip']['var_shrink_fac'], - shrink_fac_bot_lim= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['shrink_factor_bot_lim'], - avoid_vent= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['avoid_vent'], - niter=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['n_iterations'], - pushout=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['pushout'], - touchup=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['touchup'], - fill_hole=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['fill_hole'], - NN_smooth=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['NN_smooth'], - smooth_final= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['smooth_final'], - avoid_eyes= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['avoid_eyes'], - use_edge=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['use_edge'], - exp_frac=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['exp_frac'], - push_to_edge= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['push_to_edge'], - use_skull=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['use_skull'], - perc_int=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['perc_int'], - max_inter_iter= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['max_inter_iter'], - fac=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['fac'], - blur_fwhm=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['blur_fwhm'], - monkey=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['monkey'], - ) - - wf.connect([ - (inputnode_afni, skullstrip_args, [ - ('mask_vol', 'mask_vol'), - ('shrink_factor', 'shrink_fac'), - ('var_shrink_fac', 'var_shrink_fac'), - ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'), - ('avoid_vent', 'avoid_vent'), - ('niter', 'niter'), - ('pushout', 'pushout'), - ('touchup', 'touchup'), - ('fill_hole', 'fill_hole'), - ('avoid_eyes', 'avoid_eyes'), - ('use_edge', 'use_edge'), - ('exp_frac', 'exp_frac'), - ('NN_smooth', 'NN_smooth'), - ('smooth_final', 'smooth_final'), - ('push_to_edge', 'push_to_edge'), - ('use_skull', 'use_skull'), - ('perc_int', 'perc_int'), - ('max_inter_iter', 'max_inter_iter'), - ('blur_fwhm', 'blur_fwhm'), - ('fac', 'fac'), - ('monkey', 'monkey') - ]) - ]) - - anat_skullstrip = pe.Node(interface=afni.SkullStrip(), - name=f'anat_skullstrip_{pipe_num}') - anat_skullstrip.inputs.outputtype = 'NIFTI_GZ' - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip, 'in_file') - - wf.connect(skullstrip_args, 'expr', anat_skullstrip, 'args') + mask_vol=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "mask_vol" + ], + shrink_factor=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "shrink_factor" + ], + var_shrink_fac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "var_shrink_fac" + ], + shrink_fac_bot_lim=cfg.anatomical_preproc["brain_extraction"][ + "AFNI-3dSkullStrip" + ]["shrink_factor_bot_lim"], + avoid_vent=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "avoid_vent" + ], + 
niter=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "n_iterations" + ], + pushout=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "pushout" + ], + touchup=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "touchup" + ], + fill_hole=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "fill_hole" + ], + NN_smooth=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "NN_smooth" + ], + smooth_final=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "smooth_final" + ], + avoid_eyes=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "avoid_eyes" + ], + use_edge=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "use_edge" + ], + exp_frac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "exp_frac" + ], + push_to_edge=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "push_to_edge" + ], + use_skull=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "use_skull" + ], + perc_int=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "perc_int" + ], + max_inter_iter=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "max_inter_iter" + ], + fac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"]["fac"], + blur_fwhm=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "blur_fwhm" + ], + monkey=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "monkey" + ], + ) + + wf.connect( + [ + ( + inputnode_afni, + skullstrip_args, + [ + ("mask_vol", "mask_vol"), + ("shrink_factor", "shrink_fac"), + ("var_shrink_fac", "var_shrink_fac"), + ("shrink_fac_bot_lim", "shrink_fac_bot_lim"), + ("avoid_vent", "avoid_vent"), + ("niter", "niter"), + ("pushout", "pushout"), + ("touchup", "touchup"), + ("fill_hole", "fill_hole"), + ("avoid_eyes", "avoid_eyes"), + ("use_edge", "use_edge"), + ("exp_frac", "exp_frac"), + ("NN_smooth", "NN_smooth"), + ("smooth_final", "smooth_final"), + ("push_to_edge", "push_to_edge"), + ("use_skull", "use_skull"), + ("perc_int", "perc_int"), + ("max_inter_iter", "max_inter_iter"), + ("blur_fwhm", "blur_fwhm"), + ("fac", "fac"), + ("monkey", "monkey"), + ], + ) + ] + ) + + anat_skullstrip = pe.Node( + interface=afni.SkullStrip(), name=f"anat_skullstrip_{pipe_num}" + ) + anat_skullstrip.inputs.outputtype = "NIFTI_GZ" + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip, "in_file") + + wf.connect(skullstrip_args, "expr", anat_skullstrip, "args") # Generate anatomical brain mask - anat_brain_mask = pe.Node(interface=afni.Calc(), - name=f'anat_brain_mask_{pipe_num}') + anat_brain_mask = pe.Node(interface=afni.Calc(), name=f"anat_brain_mask_{pipe_num}") - anat_brain_mask.inputs.expr = 'step(a)' - anat_brain_mask.inputs.outputtype = 'NIFTI_GZ' + anat_brain_mask.inputs.expr = "step(a)" + anat_brain_mask.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_skullstrip, 'out_file', - anat_brain_mask, 'in_file_a') + wf.connect(anat_skullstrip, "out_file", anat_brain_mask, "in_file_a") - if strat_pool.check_rpool('desc-preproc_T1w'): - outputs = { - 'space-T1w_desc-brain_mask': (anat_brain_mask, 'out_file') - } + if strat_pool.check_rpool("desc-preproc_T1w"): + outputs = {"space-T1w_desc-brain_mask": 
(anat_brain_mask, "out_file")} - elif strat_pool.check_rpool('desc-preproc_T2w'): - outputs = { - 'space-T2w_desc-brain_mask': (anat_brain_mask, 'out_file') - } + elif strat_pool.check_rpool("desc-preproc_T2w"): + outputs = {"space-T2w_desc-brain_mask": (anat_brain_mask, "out_file")} return (wf, outputs) def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): inputnode_bet = pe.Node( - util.IdentityInterface(fields=['frac', - 'mask_boolean', - 'mesh_boolean', - 'outline', - 'padding', - 'radius', - 'reduce_bias', - 'remove_eyes', - 'robust', - 'skull', - 'surfaces', - 'threshold', - 'vertical_gradient']), - name=f'BET_options_{pipe_num}') - - + util.IdentityInterface( + fields=[ + "frac", + "mask_boolean", + "mesh_boolean", + "outline", + "padding", + "radius", + "reduce_bias", + "remove_eyes", + "robust", + "skull", + "surfaces", + "threshold", + "vertical_gradient", + ] + ), + name=f"BET_options_{pipe_num}", + ) + anat_skullstrip = pe.Node( - interface=fsl.BET(), name=f'anat_BET_skullstrip_{pipe_num}') - anat_skullstrip.inputs.output_type = 'NIFTI_GZ' + interface=fsl.BET(), name=f"anat_BET_skullstrip_{pipe_num}" + ) + anat_skullstrip.inputs.output_type = "NIFTI_GZ" inputnode_bet.inputs.set( - frac=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['frac'], - mask_boolean= True, - mesh_boolean= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['mesh_boolean'], - outline=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['outline'], - padding=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['padding'], - radius=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['radius'], - reduce_bias= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['reduce_bias'], - remove_eyes= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['remove_eyes'], - robust=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['robust'], - skull=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['skull'], - surfaces=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['surfaces'], - threshold=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['threshold'], - vertical_gradient= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['vertical_gradient'], - ) - + frac=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["frac"], + mask_boolean=True, + mesh_boolean=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "mesh_boolean" + ], + outline=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["outline"], + padding=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["padding"], + radius=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["radius"], + reduce_bias=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "reduce_bias" + ], + remove_eyes=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "remove_eyes" + ], + robust=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["robust"], + skull=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["skull"], + surfaces=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["surfaces"], + threshold=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["threshold"], + vertical_gradient=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "vertical_gradient" + ], + ) + anat_robustfov = pe.Node( - interface=fsl.RobustFOV(), name=f'anat_RobustFOV_{pipe_num}') - - anat_robustfov.inputs.output_type = 'NIFTI_GZ' - - anat_pad_RobustFOV_cropped = pe.Node(util.Function(input_names=['cropped_image_path', 'target_image_path'], - output_names=['padded_image_path'], - function=pad), - 
name=f'anat_pad_mask_{pipe_num}' - ) - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - if cfg.anatomical_preproc['brain_extraction']['FSL-BET']['Robustfov']: - wf.connect(node, out, anat_robustfov, 'in_file') - wf.connect(node, out, anat_pad_RobustFOV_cropped, 'target_image_path') - wf.connect(anat_robustfov, 'out_roi', anat_pad_RobustFOV_cropped, 'cropped_image_path') - wf.connect(anat_pad_RobustFOV_cropped, 'padded_image_path', anat_skullstrip,'in_file') - else : - wf.connect(node, out, anat_skullstrip, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - if cfg.anatomical_preproc['brain_extraction']['FSL-BET']['Robustfov']: - wf.connect(node, out, anat_robustfov, 'in_file') - wf.connect(node, out, anat_pad_RobustFOV_cropped, 'target_image_path') - wf.connect(anat_robustfov, 'out_roi', anat_pad_RobustFOV_cropped, 'cropped_image_path') - wf.connect(anat_pad_RobustFOV_cropped, 'padded_image_path', anat_skullstrip,'in_file') - else : - wf.connect(node, out, anat_skullstrip, 'in_file') - - wf.connect([ - (inputnode_bet, anat_skullstrip, [ - ('frac', 'frac'), - ('mask_boolean', 'mask'), - ('mesh_boolean', 'mesh'), - ('outline', 'outline'), - ('padding', 'padding'), - ('radius', 'radius'), - ('reduce_bias', 'reduce_bias'), - ('remove_eyes', 'remove_eyes'), - ('robust', 'robust'), - ('skull', 'skull'), - ('surfaces', 'surfaces'), - ('threshold', 'threshold'), - ('vertical_gradient', 'vertical_gradient'), - ]) - ]) - - if strat_pool.check_rpool('desc-preproc_T1w'): - outputs = { - 'space-T1w_desc-brain_mask': (anat_skullstrip, 'mask_file') - } + interface=fsl.RobustFOV(), name=f"anat_RobustFOV_{pipe_num}" + ) - elif strat_pool.check_rpool('desc-preproc_T2w'): - outputs = { - 'space-T2w_desc-brain_mask': (anat_skullstrip, 'mask_file') - } + anat_robustfov.inputs.output_type = "NIFTI_GZ" + + anat_pad_RobustFOV_cropped = pe.Node( + util.Function( + input_names=["cropped_image_path", "target_image_path"], + output_names=["padded_image_path"], + function=pad, + ), + name=f"anat_pad_mask_{pipe_num}", + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + if cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["Robustfov"]: + wf.connect(node, out, anat_robustfov, "in_file") + wf.connect(node, out, anat_pad_RobustFOV_cropped, "target_image_path") + wf.connect( + anat_robustfov, + "out_roi", + anat_pad_RobustFOV_cropped, + "cropped_image_path", + ) + wf.connect( + anat_pad_RobustFOV_cropped, + "padded_image_path", + anat_skullstrip, + "in_file", + ) + else: + wf.connect(node, out, anat_skullstrip, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + if cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["Robustfov"]: + wf.connect(node, out, anat_robustfov, "in_file") + wf.connect(node, out, anat_pad_RobustFOV_cropped, "target_image_path") + wf.connect( + anat_robustfov, + "out_roi", + anat_pad_RobustFOV_cropped, + "cropped_image_path", + ) + wf.connect( + anat_pad_RobustFOV_cropped, + "padded_image_path", + anat_skullstrip, + "in_file", + ) + else: + wf.connect(node, out, anat_skullstrip, "in_file") + + wf.connect( + [ + ( + inputnode_bet, + anat_skullstrip, + [ + ("frac", "frac"), + ("mask_boolean", "mask"), + ("mesh_boolean", "mesh"), + ("outline", "outline"), + ("padding", "padding"), + ("radius", "radius"), + ("reduce_bias", "reduce_bias"), + 
("remove_eyes", "remove_eyes"), + ("robust", "robust"), + ("skull", "skull"), + ("surfaces", "surfaces"), + ("threshold", "threshold"), + ("vertical_gradient", "vertical_gradient"), + ], + ) + ] + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + outputs = {"space-T1w_desc-brain_mask": (anat_skullstrip, "mask_file")} + + elif strat_pool.check_rpool("desc-preproc_T2w"): + outputs = {"space-T2w_desc-brain_mask": (anat_skullstrip, "mask_file")} return (wf, outputs) @@ -711,37 +846,44 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Skull-stripping using niworkflows-ants anat_skullstrip_ants = init_brain_extraction_wf( - tpl_target_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'template_path'], - tpl_mask_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'mask_path'], - tpl_regmask_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'regmask_path'], - name='anat_skullstrip_ants', - atropos_use_random_seed=cfg.pipeline_setup['system_config'][ - 'random_seed'] is None) - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip_ants, 'inputnode.in_files') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_ants, 'inputnode.in_files') - - if strat_pool.check_rpool('desc-preproc_T1w'): + tpl_target_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "template_path" + ], + tpl_mask_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "mask_path" + ], + tpl_regmask_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "regmask_path" + ], + name="anat_skullstrip_ants", + atropos_use_random_seed=cfg.pipeline_setup["system_config"]["random_seed"] + is None, + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip_ants, "inputnode.in_files") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_ants, "inputnode.in_files") + + if strat_pool.check_rpool("desc-preproc_T1w"): outputs = { - 'space-T1w_desc-brain_mask': (anat_skullstrip_ants, 'atropos_wf.copy_xform.out_mask'), - 'desc-preproc_T1w': (anat_skullstrip_ants, 'copy_xform.out_file') + "space-T1w_desc-brain_mask": ( + anat_skullstrip_ants, + "atropos_wf.copy_xform.out_mask", + ), + "desc-preproc_T1w": (anat_skullstrip_ants, "copy_xform.out_file"), } - elif strat_pool.check_rpool('desc-preproc_T2w'): + elif strat_pool.check_rpool("desc-preproc_T2w"): outputs = { - 'space-T2w_desc-brain_mask': (anat_skullstrip_ants, 'atropos_wf.copy_xform.out_mask'), - 'desc-preproc_T2w': (anat_skullstrip_ants, 'copy_xform.out_file') + "space-T2w_desc-brain_mask": ( + anat_skullstrip_ants, + "atropos_wf.copy_xform.out_mask", + ), + "desc-preproc_T2w": (anat_skullstrip_ants, "copy_xform.out_file"), } return (wf, outputs) @@ -757,142 +899,169 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): rescale_dim: 256 """ from CPAC.unet.function import predict_volumes - unet_mask = pe.Node(util.Function(input_names=['model_path', 'cimg_in'], - output_names=['out_path'], - function=predict_volumes), - name=f'unet_mask_{pipe_num}') - node, out = strat_pool.get_data('unet-model') - 
wf.connect(node, out, unet_mask, 'model_path') + unet_mask = pe.Node( + util.Function( + input_names=["model_path", "cimg_in"], + output_names=["out_path"], + function=predict_volumes, + ), + name=f"unet_mask_{pipe_num}", + ) + + node, out = strat_pool.get_data("unet-model") + wf.connect(node, out, unet_mask, "model_path") - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, unet_mask, 'cimg_in') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, unet_mask, "cimg_in") - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, unet_mask, 'cimg_in') + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, unet_mask, "cimg_in") """ Revised mask with ANTs """ # fslmaths -mul brain.nii.gz - unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'unet_masked_brain_{pipe_num}') + unet_masked_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"unet_masked_brain_{pipe_num}" + ) unet_masked_brain.inputs.op_string = "-mul %s" - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, unet_masked_brain, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, unet_masked_brain, 'in_file') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, unet_masked_brain, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, unet_masked_brain, "in_file") - wf.connect(unet_mask, 'out_path', unet_masked_brain, 'operand_files') + wf.connect(unet_mask, "out_path", unet_masked_brain, "operand_files") # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc - native_brain_to_template_brain = pe.Node(interface=fsl.FLIRT(), - name=f'native_brain_to_template_' - f'brain_{pipe_num}') + native_brain_to_template_brain = pe.Node( + interface=fsl.FLIRT(), name=f"native_brain_to_template_" f"brain_{pipe_num}" + ) native_brain_to_template_brain.inputs.dof = 6 - native_brain_to_template_brain.inputs.interp = 'sinc' - wf.connect(unet_masked_brain, 'out_file', - native_brain_to_template_brain, 'in_file') + native_brain_to_template_brain.inputs.interp = "sinc" + wf.connect(unet_masked_brain, "out_file", native_brain_to_template_brain, "in_file") - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, native_brain_to_template_brain, 'reference') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, native_brain_to_template_brain, "reference") # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat - native_head_to_template_head = pe.Node(interface=fsl.FLIRT(), - name=f'native_head_to_template_' - f'head_{pipe_num}') + native_head_to_template_head = pe.Node( + interface=fsl.FLIRT(), name=f"native_head_to_template_" f"head_{pipe_num}" + ) native_head_to_template_head.inputs.apply_xfm = True - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, native_head_to_template_head, 'in_file') - - elif 
strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, native_head_to_template_head, 'in_file') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, native_head_to_template_head, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, native_head_to_template_head, "in_file") - wf.connect(native_brain_to_template_brain, 'out_matrix_file', - native_head_to_template_head, 'in_matrix_file') + wf.connect( + native_brain_to_template_brain, + "out_matrix_file", + native_head_to_template_head, + "in_matrix_file", + ) - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, native_head_to_template_head, 'reference') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, native_head_to_template_head, "reference") # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz - template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'template_brain_mask_{pipe_num}') - template_brain_mask.inputs.args = '-bin' + template_brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"template_brain_mask_{pipe_num}" + ) + template_brain_mask.inputs.args = "-bin" - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, template_brain_mask, 'in_file') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, template_brain_mask, "in_file") # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000 - ants_template_head_to_template = pe.Node(interface=ants.Registration(), - name=f'template_head_to_' - f'template_{pipe_num}') - ants_template_head_to_template.inputs.metric = ['CC'] + ants_template_head_to_template = pe.Node( + interface=ants.Registration(), name=f"template_head_to_" f"template_{pipe_num}" + ) + ants_template_head_to_template.inputs.metric = ["CC"] ants_template_head_to_template.inputs.metric_weight = [1, 5] - ants_template_head_to_template.inputs.transforms = ['SyN'] + ants_template_head_to_template.inputs.transforms = ["SyN"] ants_template_head_to_template.inputs.transform_parameters = [(0.25,)] - ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor' - ants_template_head_to_template.inputs.number_of_iterations = [ - [60, 50, 20]] + ants_template_head_to_template.inputs.interpolation = "NearestNeighbor" + ants_template_head_to_template.inputs.number_of_iterations = [[60, 50, 20]] ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]] ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]] - ants_template_head_to_template.inputs.convergence_threshold = [1.e-8] - wf.connect(native_head_to_template_head, 'out_file', - ants_template_head_to_template, 'fixed_image') + ants_template_head_to_template.inputs.convergence_threshold = [1.0e-8] + wf.connect( + native_head_to_template_head, + "out_file", + ants_template_head_to_template, + "fixed_image", + ) - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, ants_template_head_to_template, 'moving_image') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, ants_template_head_to_template, "moving_image") # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz 
atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz template_head_transform_to_template = pe.Node( interface=ants.ApplyTransforms(), - name=f'template_head_transform_to_template_{pipe_num}') + name=f"template_head_transform_to_template_{pipe_num}", + ) template_head_transform_to_template.inputs.dimension = 3 - wf.connect(template_brain_mask, 'out_file', - template_head_transform_to_template, 'input_image') - wf.connect(native_brain_to_template_brain, 'out_file', - template_head_transform_to_template, 'reference_image') - wf.connect(ants_template_head_to_template, 'forward_transforms', - template_head_transform_to_template, 'transforms') + wf.connect( + template_brain_mask, + "out_file", + template_head_transform_to_template, + "input_image", + ) + wf.connect( + native_brain_to_template_brain, + "out_file", + template_head_transform_to_template, + "reference_image", + ) + wf.connect( + ants_template_head_to_template, + "forward_transforms", + template_head_transform_to_template, + "transforms", + ) - # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat  - invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') + # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat + invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True - wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt, - 'in_file') + wf.connect(native_brain_to_template_brain, "out_matrix_file", invt, "in_file") # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat - template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(), - name=f'template_brain_to_native_' - f'brain_{pipe_num}') + template_brain_to_native_brain = pe.Node( + interface=fsl.FLIRT(), name=f"template_brain_to_native_" f"brain_{pipe_num}" + ) template_brain_to_native_brain.inputs.apply_xfm = True - wf.connect(template_head_transform_to_template, 'output_image', - template_brain_to_native_brain, 'in_file') - wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain, - 'reference') - wf.connect(invt, 'out_file', template_brain_to_native_brain, - 'in_matrix_file') + wf.connect( + template_head_transform_to_template, + "output_image", + template_brain_to_native_brain, + "in_file", + ) + wf.connect( + unet_masked_brain, "out_file", template_brain_to_native_brain, "reference" + ) + wf.connect(invt, "out_file", template_brain_to_native_brain, "in_matrix_file") # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz - refined_mask = pe.Node(interface=fsl.Threshold(), name=f'refined_mask' - f'_{pipe_num}') + refined_mask = pe.Node( + interface=fsl.Threshold(), name=f"refined_mask" f"_{pipe_num}" + ) refined_mask.inputs.thresh = 0.5 - refined_mask.inputs.args = '-bin' - wf.connect(template_brain_to_native_brain, 'out_file', refined_mask, - 'in_file') + refined_mask.inputs.args = "-bin" + wf.connect(template_brain_to_native_brain, "out_file", refined_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (refined_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (refined_mask, "out_file")} return (wf, outputs) @@ -900,365 +1069,374 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # register FS brain mask to native space fs_brain_mask_to_native = pe.Node( - interface=freesurfer.ApplyVolTransform(), - name='fs_brain_mask_to_native') + interface=freesurfer.ApplyVolTransform(), 
name="fs_brain_mask_to_native" + ) fs_brain_mask_to_native.inputs.reg_header = True - node, out = strat_pool.get_data('pipeline-fs_brainmask') - wf.connect(node, out, fs_brain_mask_to_native, 'source_file') + node, out = strat_pool.get_data("pipeline-fs_brainmask") + wf.connect(node, out, fs_brain_mask_to_native, "source_file") - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, fs_brain_mask_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, fs_brain_mask_to_native, "target_file") - node, out = strat_pool.get_data('freesurfer-subject-dir') - wf.connect(node, out, fs_brain_mask_to_native, 'subjects_dir') + node, out = strat_pool.get_data("freesurfer-subject-dir") + wf.connect(node, out, fs_brain_mask_to_native, "subjects_dir") # convert brain mask file from .mgz to .nii.gz - fs_brain_mask_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'fs_brainmask_to_nifti_{pipe_num}') - wf.connect(fs_brain_mask_to_native, 'transformed_file', - fs_brain_mask_to_nifti, 'in_file') + fs_brain_mask_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"fs_brainmask_to_nifti_{pipe_num}", + ) + wf.connect( + fs_brain_mask_to_native, "transformed_file", fs_brain_mask_to_nifti, "in_file" + ) # binarize the brain mask - binarize_fs_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_fs_brainmask_{pipe_num}') - binarize_fs_brain_mask.inputs.args = '-bin' - wf.connect(fs_brain_mask_to_nifti, 'out_file', - binarize_fs_brain_mask, 'in_file') + binarize_fs_brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_fs_brainmask_{pipe_num}" + ) + binarize_fs_brain_mask.inputs.args = "-bin" + wf.connect(fs_brain_mask_to_nifti, "out_file", binarize_fs_brain_mask, "in_file") # fill holes - fill_fs_brain_mask = pe.Node(interface=afni.MaskTool(), - name=f'fill_fs_brainmask_{pipe_num}') + fill_fs_brain_mask = pe.Node( + interface=afni.MaskTool(), name=f"fill_fs_brainmask_{pipe_num}" + ) fill_fs_brain_mask.inputs.fill_holes = True - fill_fs_brain_mask.inputs.outputtype = 'NIFTI_GZ' - wf.connect(binarize_fs_brain_mask, 'out_file', - fill_fs_brain_mask, 'in_file') + fill_fs_brain_mask.inputs.outputtype = "NIFTI_GZ" + wf.connect(binarize_fs_brain_mask, "out_file", fill_fs_brain_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (fill_fs_brain_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (fill_fs_brain_mask, "out_file")} return (wf, outputs) def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): - ''' + """ ABCD harmonization - anatomical brain mask generation Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 - ''' - - wmparc_to_nifti = pe.Node(util.Function(input_names=['in_file', - 'reslice_like', - 'args'], - output_names=['out_file'], - function=mri_convert), - name=f'wmparc_to_nifti_{pipe_num}') - + """ + wmparc_to_nifti = pe.Node( + util.Function( + input_names=["in_file", "reslice_like", "args"], + output_names=["out_file"], + function=mri_convert, + ), + name=f"wmparc_to_nifti_{pipe_num}", + ) + # Register wmparc file if ingressing FreeSurfer data - if strat_pool.check_rpool('pipeline-fs_xfm'): + if strat_pool.check_rpool("pipeline-fs_xfm"): + wmparc_to_native = pe.Node( + util.Function( + input_names=["source_file", "target_file", "xfm", 
"out_file"], + output_names=["transformed_file"], + function=normalize_wmparc, + ), + name=f"wmparc_to_native_{pipe_num}", + ) - wmparc_to_native = pe.Node(util.Function(input_names=['source_file', - 'target_file', - 'xfm', - 'out_file'], - output_names=['transformed_file'], - function=normalize_wmparc), - name=f'wmparc_to_native_{pipe_num}') - - wmparc_to_native.inputs.out_file = 'wmparc_warped.mgz' + wmparc_to_native.inputs.out_file = "wmparc_warped.mgz" - node, out = strat_pool.get_data('pipeline-fs_wmparc') - wf.connect(node, out, wmparc_to_native, 'source_file') + node, out = strat_pool.get_data("pipeline-fs_wmparc") + wf.connect(node, out, wmparc_to_native, "source_file") - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, wmparc_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, wmparc_to_native, "target_file") - node, out = strat_pool.get_data('pipeline-fs_xfm') - wf.connect(node, out, wmparc_to_native, 'xfm') + node, out = strat_pool.get_data("pipeline-fs_xfm") + wf.connect(node, out, wmparc_to_native, "xfm") + + wf.connect(wmparc_to_native, "transformed_file", wmparc_to_nifti, "in_file") - wf.connect(wmparc_to_native, 'transformed_file', wmparc_to_nifti, 'in_file') - else: - - node, out = strat_pool.get_data('pipeline-fs_wmparc') - wf.connect(node, out, wmparc_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_wmparc") + wf.connect(node, out, wmparc_to_nifti, "in_file") - wmparc_to_nifti.inputs.args = '-rt nearest' + wmparc_to_nifti.inputs.args = "-rt nearest" - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, wmparc_to_nifti, 'reslice_like') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, wmparc_to_nifti, "reslice_like") - binary_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_wmparc_{pipe_num}') - binary_mask.inputs.args = '-bin -dilD -dilD -dilD -ero -ero' + binary_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_wmparc_{pipe_num}" + ) + binary_mask.inputs.args = "-bin -dilD -dilD -dilD -ero -ero" - wf.connect(wmparc_to_nifti, 'out_file', binary_mask, 'in_file') + wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") - wb_command_fill_holes = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=wb_command), - name=f'wb_command_fill_holes_{pipe_num}') + wb_command_fill_holes = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=wb_command + ), + name=f"wb_command_fill_holes_{pipe_num}", + ) - wf.connect(binary_mask, 'out_file', wb_command_fill_holes, 'in_file') + wf.connect(binary_mask, "out_file", wb_command_fill_holes, "in_file") - binary_filled_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_filled_wmparc_{pipe_num}') - binary_filled_mask.inputs.args = '-bin' + binary_filled_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_filled_wmparc_{pipe_num}" + ) + binary_filled_mask.inputs.args = "-bin" - wf.connect(wb_command_fill_holes, 'out_file', - binary_filled_mask, 'in_file') + wf.connect(wb_command_fill_holes, "out_file", binary_filled_mask, "in_file") - brain_mask_to_t1_restore = pe.Node(interface=fsl.ApplyWarp(), - name=f'brain_mask_to_t1_restore_{pipe_num}') - brain_mask_to_t1_restore.inputs.interp = 'nn' - brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] 
+ brain_mask_to_t1_restore = pe.Node( + interface=fsl.ApplyWarp(), name=f"brain_mask_to_t1_restore_{pipe_num}" + ) + brain_mask_to_t1_restore.inputs.interp = "nn" + brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(binary_filled_mask, 'out_file', - brain_mask_to_t1_restore, 'in_file') + wf.connect(binary_filled_mask, "out_file", brain_mask_to_t1_restore, "in_file") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, brain_mask_to_t1_restore, 'ref_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, brain_mask_to_t1_restore, "ref_file") - outputs = { - 'space-T1w_desc-brain_mask': (brain_mask_to_t1_restore, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (brain_mask_to_t1_restore, "out_file")} return (wf, outputs) def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): - - node_id = f'{opt.lower()}_{pipe_num}' + node_id = f"{opt.lower()}_{pipe_num}" # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz - convert_fs_brainmask_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'convert_fs_brainmask_to_nifti_{node_id}') + convert_fs_brainmask_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"convert_fs_brainmask_to_nifti_{node_id}", + ) - node, out = strat_pool.get_data('pipeline-fs_brainmask') - wf.connect(node, out, convert_fs_brainmask_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_brainmask") + wf.connect(node, out, convert_fs_brainmask_to_nifti, "in_file") # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz - convert_fs_T1_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'convert_fs_T1_to_nifti_{node_id}') + convert_fs_T1_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"convert_fs_T1_to_nifti_{node_id}", + ) - node, out = strat_pool.get_data('pipeline-fs_T1') - wf.connect(node, out, convert_fs_T1_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_T1") + wf.connect(node, out, convert_fs_T1_to_nifti, "in_file") # 3dresample -orient RPI -inset brainmask.nii.gz -prefix brain_fs.nii.gz - reorient_fs_brainmask = pe.Node(interface=afni.Resample(), - name=f'reorient_fs_brainmask_{node_id}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - reorient_fs_brainmask.inputs.orientation = 'RPI' - reorient_fs_brainmask.inputs.outputtype = 'NIFTI_GZ' + reorient_fs_brainmask = pe.Node( + interface=afni.Resample(), + name=f"reorient_fs_brainmask_{node_id}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + reorient_fs_brainmask.inputs.orientation = "RPI" + reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ" - wf.connect(convert_fs_brainmask_to_nifti, 'out_file', - reorient_fs_brainmask, 'in_file') + wf.connect( + convert_fs_brainmask_to_nifti, "out_file", reorient_fs_brainmask, "in_file" + ) # fslmaths brain_fs.nii.gz -abs -bin brain_fs_mask.nii.gz - binarize_fs_brain = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_fs_brain_{node_id}') - binarize_fs_brain.inputs.args = '-abs -bin' + binarize_fs_brain = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_fs_brain_{node_id}" + ) + 
binarize_fs_brain.inputs.args = "-abs -bin" - wf.connect(reorient_fs_brainmask, 'out_file', - binarize_fs_brain, 'in_file') + wf.connect(reorient_fs_brainmask, "out_file", binarize_fs_brain, "in_file") # 3dresample -orient RPI -inset T1.nii.gz -prefix head_fs.nii.gz - reorient_fs_T1 = pe.Node(interface=afni.Resample(), - name=f'reorient_fs_T1_{node_id}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - reorient_fs_T1.inputs.orientation = 'RPI' - reorient_fs_T1.inputs.outputtype = 'NIFTI_GZ' + reorient_fs_T1 = pe.Node( + interface=afni.Resample(), + name=f"reorient_fs_T1_{node_id}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + reorient_fs_T1.inputs.orientation = "RPI" + reorient_fs_T1.inputs.outputtype = "NIFTI_GZ" - wf.connect(convert_fs_T1_to_nifti, 'out_file', - reorient_fs_T1, 'in_file') + wf.connect(convert_fs_T1_to_nifti, "out_file", reorient_fs_T1, "in_file") # flirt -in head_fs.nii.gz -ref ${FSLDIR}/data/standard/MNI152_T1_1mm.nii.gz \ # -out tmp_head_fs2standard.nii.gz -omat tmp_head_fs2standard.mat -bins 256 -cost corratio \ # -searchrx -90 90 -searchry -90 90 -searchrz -90 90 -dof 12 -interp trilinear - convert_head_to_template = pe.Node(interface=fsl.FLIRT(), - name=f'convert_head_to_template_{node_id}') - convert_head_to_template.inputs.cost = 'corratio' - convert_head_to_template.inputs.interp = 'trilinear' + convert_head_to_template = pe.Node( + interface=fsl.FLIRT(), name=f"convert_head_to_template_{node_id}" + ) + convert_head_to_template.inputs.cost = "corratio" + convert_head_to_template.inputs.interp = "trilinear" convert_head_to_template.inputs.bins = 256 convert_head_to_template.inputs.dof = 12 convert_head_to_template.inputs.searchr_x = [-90, 90] convert_head_to_template.inputs.searchr_y = [-90, 90] convert_head_to_template.inputs.searchr_z = [-90, 90] - wf.connect(reorient_fs_T1, 'out_file', - convert_head_to_template, 'in_file') + wf.connect(reorient_fs_T1, "out_file", convert_head_to_template, "in_file") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, convert_head_to_template, 'reference') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, convert_head_to_template, "reference") # convert_xfm -omat tmp_standard2head_fs.mat -inverse tmp_head_fs2standard.mat - convert_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name=f'convert_xfm_{node_id}') + convert_xfm = pe.Node( + interface=fsl_utils.ConvertXFM(), name=f"convert_xfm_{node_id}" + ) convert_xfm.inputs.invert_xfm = True - wf.connect(convert_head_to_template, 'out_matrix_file', - convert_xfm, 'in_file') + wf.connect(convert_head_to_template, "out_matrix_file", convert_xfm, "in_file") # bet tmp_head_fs2standard.nii.gz tmp.nii.gz -f ${bet_thr_tight} -m - skullstrip = pe.Node(interface=fsl.BET(), - name=f'anat_BET_skullstrip_{node_id}') - skullstrip.inputs.output_type = 'NIFTI_GZ' - skullstrip.inputs.mask=True - - if opt == 'FreeSurfer-BET-Tight': - skullstrip.inputs.frac=0.3 - elif opt == 'FreeSurfer-BET-Loose': - skullstrip.inputs.frac=0.1 - - wf.connect(convert_head_to_template, 'out_file', - skullstrip, 'in_file') - + skullstrip = pe.Node(interface=fsl.BET(), name=f"anat_BET_skullstrip_{node_id}") + skullstrip.inputs.output_type = "NIFTI_GZ" + skullstrip.inputs.mask = True + + if opt == "FreeSurfer-BET-Tight": + skullstrip.inputs.frac = 0.3 + elif opt == "FreeSurfer-BET-Loose": + skullstrip.inputs.frac = 0.1 + + wf.connect(convert_head_to_template, "out_file", skullstrip, "in_file") + # fslmaths tmp_mask.nii.gz -mas 
${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'apply_mask_{node_id}') + apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f"apply_mask_{node_id}") - wf.connect(skullstrip, 'out_file', - apply_mask, 'in_file') + wf.connect(skullstrip, "out_file", apply_mask, "in_file") - node, out = strat_pool.get_data('T1w-brain-template-mask-ccs') - wf.connect(node, out, apply_mask, 'mask_file') + node, out = strat_pool.get_data("T1w-brain-template-mask-ccs") + wf.connect(node, out, apply_mask, "mask_file") # flirt -in tmp_mask.nii.gz -applyxfm -init tmp_standard2head_fs.mat -out brain_fsl_mask_tight.nii.gz \ # -paddingsize 0.0 -interp nearestneighbour -ref head_fs.nii.gz - convert_template_mask_to_native = pe.Node(interface=fsl.FLIRT(), - name=f'convert_template_mask_to_native_{node_id}') + convert_template_mask_to_native = pe.Node( + interface=fsl.FLIRT(), name=f"convert_template_mask_to_native_{node_id}" + ) convert_template_mask_to_native.inputs.apply_xfm = True convert_template_mask_to_native.inputs.padding_size = 0 - convert_template_mask_to_native.inputs.interp = 'nearestneighbour' + convert_template_mask_to_native.inputs.interp = "nearestneighbour" - wf.connect(apply_mask, 'out_file', - convert_template_mask_to_native, 'in_file') + wf.connect(apply_mask, "out_file", convert_template_mask_to_native, "in_file") - wf.connect(convert_xfm, 'out_file', - convert_template_mask_to_native, 'in_matrix_file') + wf.connect( + convert_xfm, "out_file", convert_template_mask_to_native, "in_matrix_file" + ) - wf.connect(reorient_fs_T1, 'out_file', - convert_template_mask_to_native, 'reference') + wf.connect(reorient_fs_T1, "out_file", convert_template_mask_to_native, "reference") # fslmaths brain_fs_mask.nii.gz -add brain_fsl_mask_tight.nii.gz -bin brain_mask_tight.nii.gz - # BinaryMaths doesn't use -bin! - combine_mask = pe.Node(interface=fsl.BinaryMaths(), - name=f'combine_mask_{node_id}') + # BinaryMaths doesn't use -bin! 
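    # 'add' yields the union of the FreeSurfer mask and the
    # template-constrained BET mask, while 'mul' yields their intersection;
    # after 'add', voxels inside both masks hold the value 2, hence the
    # extra '-bin' pass below. A minimal numpy sketch of the same logic
    # (toy arrays for illustration only, not part of the pipeline):
    #
    #   import numpy as np
    #   fs = np.array([1, 1, 0, 0])        # FreeSurfer brain mask
    #   fsl_bet = np.array([1, 0, 1, 0])   # template-masked BET mask
    #   tight = ((fs + fsl_bet) > 0).astype(np.uint8)  # union        -> 1 1 1 0
    #   loose = (fs * fsl_bet).astype(np.uint8)        # intersection -> 1 0 0 0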
+ combine_mask = pe.Node(interface=fsl.BinaryMaths(), name=f"combine_mask_{node_id}") - if opt == 'FreeSurfer-BET-Tight': - combine_mask.inputs.operation = 'add' - elif opt == 'FreeSurfer-BET-Loose': - combine_mask.inputs.operation = 'mul' + if opt == "FreeSurfer-BET-Tight": + combine_mask.inputs.operation = "add" + elif opt == "FreeSurfer-BET-Loose": + combine_mask.inputs.operation = "mul" - wf.connect(binarize_fs_brain, 'out_file', - combine_mask, 'in_file') + wf.connect(binarize_fs_brain, "out_file", combine_mask, "in_file") - wf.connect(convert_template_mask_to_native, 'out_file', - combine_mask, 'operand_file') + wf.connect( + convert_template_mask_to_native, "out_file", combine_mask, "operand_file" + ) - binarize_combined_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_combined_mask_{node_id}') - binarize_combined_mask.inputs.args = '-bin' + binarize_combined_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_combined_mask_{node_id}" + ) + binarize_combined_mask.inputs.args = "-bin" - wf.connect(combine_mask, 'out_file', - binarize_combined_mask, 'in_file') + wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file") # CCS brain mask is in FS space, transfer it back to native T1 space - fs_fsl_brain_mask_to_native = pe.Node(interface=freesurfer.ApplyVolTransform(), - name=f'fs_fsl_brain_mask_to_native_{node_id}') + fs_fsl_brain_mask_to_native = pe.Node( + interface=freesurfer.ApplyVolTransform(), + name=f"fs_fsl_brain_mask_to_native_{node_id}", + ) fs_fsl_brain_mask_to_native.inputs.reg_header = True - fs_fsl_brain_mask_to_native.inputs.interp = 'nearest' + fs_fsl_brain_mask_to_native.inputs.interp = "nearest" - wf.connect(binarize_combined_mask, 'out_file', - fs_fsl_brain_mask_to_native, 'source_file') + wf.connect( + binarize_combined_mask, "out_file", fs_fsl_brain_mask_to_native, "source_file" + ) - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, fs_fsl_brain_mask_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, fs_fsl_brain_mask_to_native, "target_file") - node, out = strat_pool.get_data('freesurfer-subject-dir') - wf.connect(node, out, fs_fsl_brain_mask_to_native, 'subjects_dir') + node, out = strat_pool.get_data("freesurfer-subject-dir") + wf.connect(node, out, fs_fsl_brain_mask_to_native, "subjects_dir") - if opt == 'FreeSurfer-BET-Tight': + if opt == "FreeSurfer-BET-Tight": outputs = { - 'space-T1w_desc-tight_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') + "space-T1w_desc-tight_brain_mask": ( + fs_fsl_brain_mask_to_native, + "transformed_file", + ) } - elif opt == 'FreeSurfer-BET-Loose': + elif opt == "FreeSurfer-BET-Loose": outputs = { - 'space-T1w_desc-loose_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') + "space-T1w_desc-loose_brain_mask": ( + fs_fsl_brain_mask_to_native, + "transformed_file", + ) } return (wf, outputs) -def mask_T2(wf_name='mask_T2'): +def mask_T2(wf_name="mask_T2"): # create T2 mask based on T1 mask # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreliminaryMasking/macaque_masking.py - + preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_mask', - 'T2w']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_mask", "T2w"]), name="inputspec" + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T1w_brain', - 'T2w_mask', - 'T2w_brain']), - name='outputspec') + 
outputnode = pe.Node( + util.IdentityInterface(fields=["T1w_brain", "T2w_mask", "T2w_brain"]), + name="outputspec", + ) # mask_t1w = 'fslmaths {t1w} -mas {t1w_mask_edit} {t1w_brain}'.format(**kwargs) - mask_t1w = pe.Node(interface=fsl.MultiImageMaths(), - name='mask_t1w') + mask_t1w = pe.Node(interface=fsl.MultiImageMaths(), name="mask_t1w") mask_t1w.inputs.op_string = "-mas %s " - preproc.connect(inputnode, 'T1w', mask_t1w, 'in_file') - preproc.connect(inputnode, 'T1w_mask', mask_t1w, 'operand_files') - + preproc.connect(inputnode, "T1w", mask_t1w, "in_file") + preproc.connect(inputnode, "T1w_mask", mask_t1w, "operand_files") # t1w2t2w_rigid = 'flirt -dof 6 -cost mutualinfo -in {t1w} -ref {t2w} ' \ # '-omat {t1w2t2w}'.format(**kwargs) - t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_rigid') + t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), name="t1w2t2w_rigid") t1w2t2w_rigid.inputs.dof = 6 - t1w2t2w_rigid.inputs.cost = 'mutualinfo' - preproc.connect(inputnode, 'T1w', t1w2t2w_rigid, 'in_file') - preproc.connect(inputnode, 'T2w', t1w2t2w_rigid, 'reference') + t1w2t2w_rigid.inputs.cost = "mutualinfo" + preproc.connect(inputnode, "T1w", t1w2t2w_rigid, "in_file") + preproc.connect(inputnode, "T2w", t1w2t2w_rigid, "reference") # t1w2t2w_mask = 'flirt -in {t1w_mask_edit} -interp nearestneighbour -ref {' \ # 't2w} -o {t2w_brain_mask} -applyxfm -init {' \ # 't1w2t2w}'.format(**kwargs) - t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_mask') + t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), name="t1w2t2w_mask") t1w2t2w_mask.inputs.apply_xfm = True - t1w2t2w_mask.inputs.interp = 'nearestneighbour' + t1w2t2w_mask.inputs.interp = "nearestneighbour" - preproc.connect(inputnode, 'T1w_mask', t1w2t2w_mask, 'in_file') - preproc.connect(inputnode, 'T2w', t1w2t2w_mask, 'reference') - preproc.connect(t1w2t2w_rigid, 'out_matrix_file', t1w2t2w_mask, 'in_matrix_file') + preproc.connect(inputnode, "T1w_mask", t1w2t2w_mask, "in_file") + preproc.connect(inputnode, "T2w", t1w2t2w_mask, "reference") + preproc.connect(t1w2t2w_rigid, "out_matrix_file", t1w2t2w_mask, "in_matrix_file") # mask_t2w = 'fslmaths {t2w} -mas {t2w_brain_mask} ' \ # '{t2w_brain}'.format(**kwargs) - mask_t2w = pe.Node(interface=fsl.MultiImageMaths(), - name='mask_t2w') + mask_t2w = pe.Node(interface=fsl.MultiImageMaths(), name="mask_t2w") mask_t2w.inputs.op_string = "-mas %s " - preproc.connect(inputnode, 'T2w', mask_t2w, 'in_file') - preproc.connect(t1w2t2w_mask, 'out_file', mask_t2w, 'operand_files') + preproc.connect(inputnode, "T2w", mask_t2w, "in_file") + preproc.connect(t1w2t2w_mask, "out_file", mask_t2w, "operand_files") - preproc.connect(mask_t1w, 'out_file', outputnode, 'T1w_brain') - preproc.connect(mask_t2w, 'out_file', outputnode, 'T2w_brain') - preproc.connect(t1w2t2w_mask, 'out_file', outputnode, 'T2w_mask') + preproc.connect(mask_t1w, "out_file", outputnode, "T1w_brain") + preproc.connect(mask_t2w, "out_file", outputnode, "T2w_brain") + preproc.connect(t1w2t2w_mask, "out_file", outputnode, "T2w_mask") return preproc @@ -1271,29 +1449,32 @@ def mask_T2(wf_name='mask_T2'): outputs=["desc-preproc_T1w", "desc-reorient_T1w", "desc-head_T1w"], ) def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): - - anat_deoblique = pe.Node(interface=afni.Refit(), - name=f'anat_deoblique_{pipe_num}') + anat_deoblique = pe.Node(interface=afni.Refit(), name=f"anat_deoblique_{pipe_num}") anat_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('T1w') - wf.connect(node, out, anat_deoblique, 'in_file') + 
node, out = strat_pool.get_data("T1w") + wf.connect(node, out, anat_deoblique, "in_file") - anat_reorient = pe.Node(interface=afni.Resample(), - name=f'anat_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - anat_reorient.inputs.orientation = 'RPI' - anat_reorient.inputs.outputtype = 'NIFTI_GZ' + anat_reorient = pe.Node( + interface=afni.Resample(), + name=f"anat_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + anat_reorient.inputs.orientation = "RPI" + anat_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file') + wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") - outputs = {'desc-preproc_T1w': (anat_reorient, 'out_file'), - 'desc-reorient_T1w': (anat_reorient, 'out_file'), - 'desc-head_T1w': (anat_reorient, 'out_file')} + outputs = { + "desc-preproc_T1w": (anat_reorient, "out_file"), + "desc-reorient_T1w": (anat_reorient, "out_file"), + "desc-head_T1w": (anat_reorient, "out_file"), + } return (wf, outputs) + @nodeblock( name="acpc_alignment_head", switch=[ @@ -1308,25 +1489,26 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data(['desc-preproc_T1w','desc-head_T1w']) - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-head_T1w"]) + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") outputs = { - 'desc-head_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( + "desc-head_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( acpc_align, - 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1355,33 +1537,34 @@ def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_head_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data(['desc-head_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data(["desc-head_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = 
strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") if strat_pool.check_rpool("space-T1w_desc-brain_mask"): node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-head_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'space-T1w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( - acpc_align, 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "desc-head_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "space-T1w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( + acpc_align, + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1408,30 +1591,32 @@ def acpc_align_head_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_brain(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( - acpc_align, 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T1w": (acpc_align, "outputspec.acpc_aligned_brain"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( + acpc_align, + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1456,33 +1641,35 @@ def acpc_align_brain(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): + 
acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'space-T1w_desc-brain_mask': (acpc_align, 'outputspec.acpc_brain_mask'), - 'space-T1w_desc-prebrain_mask': (strat_pool.get_data('space-T1_desc-brain_mask')) + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T1w": (acpc_align, "outputspec.acpc_aligned_brain"), + "space-T1w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), + "space-T1w_desc-prebrain_mask": ( + strat_pool.get_data("space-T1_desc-brain_mask") + ), } return (wf, outputs) @@ -1503,24 +1690,21 @@ def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): + T2_to_T1_reg = T2wToT1wReg(wf_name=f"T2w_to_T1w_Reg_{pipe_num}") - T2_to_T1_reg = T2wToT1wReg(wf_name=f'T2w_to_T1w_Reg_{pipe_num}') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, T2_to_T1_reg, "inputspec.T1w") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, T2_to_T1_reg, "inputspec.T2w") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T2w') + node, out = strat_pool.get_data(["desc-acpcbrain_T1w"]) + wf.connect(node, out, T2_to_T1_reg, "inputspec.T1w_brain") - node, out = strat_pool.get_data(['desc-acpcbrain_T1w']) - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T1w_brain') + node, out = strat_pool.get_data(["desc-acpcbrain_T2w"]) + wf.connect(node, out, T2_to_T1_reg, "inputspec.T2w_brain") - node, out = strat_pool.get_data(['desc-acpcbrain_T2w']) - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T2w_brain') - - outputs = { - 'desc-preproc_T2w': (T2_to_T1_reg, 'outputspec.T2w_to_T1w') - } + outputs = {"desc-preproc_T2w": (T2_to_T1_reg, "outputspec.T2w_to_T1w")} return (wf, outputs) 
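# Every connector in this file follows the same nodeblock contract: fetch
# inputs via strat_pool.get_data, wire nipype nodes into wf, and return an
# outputs dict mapping resource keys to (node, output_port) pairs. A minimal
# hypothetical block illustrating that pattern (the block name, decorator
# arguments, and fsl.IsotropicSmooth choice are illustrative assumptions,
# not part of this patch):
#
#   @nodeblock(
#       name="smooth_head",
#       inputs=["desc-preproc_T1w"],
#       outputs=["desc-smooth_T1w"],
#   )
#   def smooth_head(wf, cfg, strat_pool, pipe_num, opt=None):
#       smooth = pe.Node(
#           fsl.IsotropicSmooth(fwhm=4), name=f"smooth_head_{pipe_num}"
#       )
#       node, out = strat_pool.get_data("desc-preproc_T1w")
#       wf.connect(node, out, smooth, "in_file")
#       return (wf, {"desc-smooth_T1w": (smooth, "out_file")})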
@@ -1535,18 +1719,16 @@ def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T1w"], ) def non_local_means(wf, cfg, strat_pool, pipe_num, opt=None): + denoise = pe.Node(interface=ants.DenoiseImage(), name=f"anat_denoise_{pipe_num}") - denoise = pe.Node(interface=ants.DenoiseImage(), - name=f'anat_denoise_{pipe_num}') - - denoise.inputs.noise_model = cfg.anatomical_preproc['non_local_means_filtering']['noise_model'] + denoise.inputs.noise_model = cfg.anatomical_preproc["non_local_means_filtering"][ + "noise_model" + ] - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, denoise, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, denoise, "input_image") - outputs = { - 'desc-preproc_T1w': (denoise, 'output_image') - } + outputs = {"desc-preproc_T1w": (denoise, "output_image")} return (wf, outputs) @@ -1568,18 +1750,20 @@ def non_local_means(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def n4_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): + n4 = pe.Node( + interface=ants.N4BiasFieldCorrection(dimension=3, copy_header=True), + name=f"anat_n4_{pipe_num}", + ) + n4.inputs.shrink_factor = cfg.anatomical_preproc["n4_bias_field_correction"][ + "shrink_factor" + ] - n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3, - copy_header=True), - name=f'anat_n4_{pipe_num}') - n4.inputs.shrink_factor = cfg.anatomical_preproc['n4_bias_field_correction']['shrink_factor'] - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, n4, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, n4, "input_image") outputs = { - 'desc-preproc_T1w': (n4, 'output_image'), - 'desc-n4_T1w': (n4, 'output_image') + "desc-preproc_T1w": (n4, "output_image"), + "desc-n4_T1w": (n4, "output_image"), } return (wf, outputs) @@ -1599,24 +1783,25 @@ def n4_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def t1t2_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): + t1t2_bias_correction = BiasFieldCorrection_sqrtT1wXT1w( + config=cfg, wf_name=f"t1t2_bias_correction_{pipe_num}" + ) - t1t2_bias_correction = BiasFieldCorrection_sqrtT1wXT1w(config=cfg, wf_name=f't1t2_bias_correction_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, t1t2_bias_correction, "inputspec.T1w") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T2w') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, t1t2_bias_correction, "inputspec.T2w") node, out = strat_pool.get_data("desc-acpcbrain_T1w") - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T1w_brain') + wf.connect(node, out, t1t2_bias_correction, "inputspec.T1w_brain") outputs = { - 'desc-preproc_T1w': (t1t2_bias_correction, 'outputspec.T1w_biascorrected'), - 'desc-brain_T1w': (t1t2_bias_correction, 'outputspec.T1w_brain_biascorrected'), - 'desc-preproc_T2w': (t1t2_bias_correction, 'outputspec.T2w_biascorrected'), - 'desc-brain_T2w': (t1t2_bias_correction, 'outputspec.T2w_brain_biascorrected'), - 'desc-biasfield_T1wT2w': (t1t2_bias_correction, 'outputspec.biasfield'), + "desc-preproc_T1w": (t1t2_bias_correction, "outputspec.T1w_biascorrected"), + "desc-brain_T1w": (t1t2_bias_correction, "outputspec.T1w_brain_biascorrected"), + "desc-preproc_T2w": 
(t1t2_bias_correction, "outputspec.T2w_biascorrected"), + "desc-brain_T2w": (t1t2_bias_correction, "outputspec.T2w_brain_biascorrected"), + "desc-biasfield_T1wT2w": (t1t2_bias_correction, "outputspec.biasfield"), } return (wf, outputs) @@ -1634,7 +1819,6 @@ def t1t2_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1652,13 +1836,9 @@ def brain_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_afni(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1675,7 +1855,6 @@ def brain_mask_acpc_afni(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1693,13 +1872,9 @@ def brain_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1716,9 +1891,7 @@ def brain_mask_acpc_fsl(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask", "desc-preproc_T1w"], ) def brain_mask_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1735,15 +1908,13 @@ def brain_mask_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask", "desc-preproc_T1w"], ) def brain_mask_acpc_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, wf_outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, wf_outputs = niworkflows_ants_brain_connector( + wf, cfg, strat_pool, pipe_num, opt + ) outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'], - 'desc-preproc_T1w': - wf_outputs['desc-preproc_T1w'] + "space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"], + "desc-preproc_T1w": wf_outputs["desc-preproc_T1w"], } return (wf, outputs) @@ -1761,7 +1932,6 @@ def brain_mask_acpc_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_unet(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1779,13 +1949,9 @@ def brain_mask_unet(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - 
wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1806,9 +1972,7 @@ def brain_mask_acpc_unet(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, - opt) + wf, outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1829,12 +1993,9 @@ def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - wf, wf_outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, - opt) - - outputs = {'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask']} + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1857,9 +2018,7 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1883,7 +2042,6 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-tight_brain_mask"], ) def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1907,11 +2065,9 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = {'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask']} + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1935,7 +2091,6 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-loose_brain_mask"], ) def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1958,12 +2113,13 @@ def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-tight_acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) - - outputs = {'space-T1w_desc-tight_acpcbrain_mask': - wf_outputs['space-T1w_desc-tight_brain_mask']} + outputs = { + "space-T1w_desc-tight_acpcbrain_mask": wf_outputs[ + "space-T1w_desc-tight_brain_mask" + ] + } return (wf, outputs) @@ -1985,11 +2141,13 @@ def brain_mask_acpc_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None outputs=["space-T1w_desc-loose_acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): 
- wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = {'space-T1w_desc-loose_acpcbrain_mask': - wf_outputs['space-T1w_desc-loose_brain_mask']} + outputs = { + "space-T1w_desc-loose_acpcbrain_mask": wf_outputs[ + "space-T1w_desc-loose_brain_mask" + ] + } return (wf, outputs) @@ -2014,8 +2172,7 @@ def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None }, ) def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): - - ''' + """ brain_mask_deoblique = pe.Node(interface=afni.Refit(), name='brain_mask_deoblique') brain_mask_deoblique.inputs.deoblique = True @@ -2030,25 +2187,26 @@ def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ' wf.connect(brain_mask_deoblique, 'out_file', brain_mask_reorient, 'in_file') - ''' - - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_{pipe_num}') + """ + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_{pipe_num}" + ) - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node_T1w, out_T1w = strat_pool.get_data('desc-head_T1w') - wf.connect(node_T1w, out_T1w, anat_skullstrip_orig_vol, 'in_file_a') + node_T1w, out_T1w = strat_pool.get_data("desc-head_T1w") + wf.connect(node_T1w, out_T1w, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") outputs = { - 'desc-preproc_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-brain_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-head_T1w': (node_T1w, out_T1w) + "desc-preproc_T1w": (anat_skullstrip_orig_vol, "out_file"), + "desc-brain_T1w": (anat_skullstrip_orig_vol, "out_file"), + "desc-head_T1w": (node_T1w, out_T1w), } return (wf, outputs) @@ -2068,23 +2226,24 @@ def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def brain_extraction_temp(wf, cfg, strat_pool, pipe_num, opt=None): + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_temp_{pipe_num}" + ) - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_temp_{pipe_num}') - - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") outputs = { - 'desc-preproc_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-tempbrain_T1w': (anat_skullstrip_orig_vol, 'out_file') + "desc-preproc_T1w": 
(anat_skullstrip_orig_vol, "out_file"), + "desc-tempbrain_T1w": (anat_skullstrip_orig_vol, "out_file"), } return (wf, outputs) @@ -2098,26 +2257,28 @@ def brain_extraction_temp(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-reorient_T2w", "desc-head_T2w"], ) def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - T2_deoblique = pe.Node(interface=afni.Refit(), - name=f'T2_deoblique_{pipe_num}') + T2_deoblique = pe.Node(interface=afni.Refit(), name=f"T2_deoblique_{pipe_num}") T2_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('T2w') - wf.connect(node, out, T2_deoblique, 'in_file') + node, out = strat_pool.get_data("T2w") + wf.connect(node, out, T2_deoblique, "in_file") - T2_reorient = pe.Node(interface=afni.Resample(), - name=f'T2_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - T2_reorient.inputs.orientation = 'RPI' - T2_reorient.inputs.outputtype = 'NIFTI_GZ' + T2_reorient = pe.Node( + interface=afni.Resample(), + name=f"T2_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + T2_reorient.inputs.orientation = "RPI" + T2_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(T2_deoblique, 'out_file', T2_reorient, 'in_file') + wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") - outputs = {'desc-preproc_T2w': (T2_reorient, 'out_file'), - 'desc-reorient_T2w': (T2_reorient, 'out_file'), - 'desc-head_T2w': (T2_reorient, 'out_file')} + outputs = { + "desc-preproc_T2w": (T2_reorient, "out_file"), + "desc-reorient_T2w": (T2_reorient, "out_file"), + "desc-head_T2w": (T2_reorient, "out_file"), + } return (wf, outputs) @@ -2132,22 +2293,20 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def acpc_align_head_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head') - } + outputs = {"desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head")} return (wf, outputs) @@ -2162,23 +2321,22 @@ def acpc_align_head_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "space-T2w_desc-brain_mask"], ) def acpc_align_head_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = 
strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'space-T2w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "space-T2w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), } return (wf, outputs) @@ -2201,28 +2359,28 @@ def acpc_align_head_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-acpcbrain_T2w"], ) def acpc_align_brain_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T2w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T2w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T2w': (acpc_align, 'outputspec.acpc_aligned_brain') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T2w": (acpc_align, "outputspec.acpc_aligned_brain"), } return (wf, outputs) @@ -2242,33 +2400,32 @@ def acpc_align_brain_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-acpcbrain_T2w", "space-T2w_desc-brain_mask"], ) def acpc_align_brain_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T2w") + wf.connect(node, out, 
acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('space-T2w_desc-brain_mask') - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + node, out = strat_pool.get_data("space-T2w_desc-brain_mask") + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T2w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T2w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T2w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'space-T2w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T2w": (acpc_align, "outputspec.acpc_aligned_brain"), + "space-T2w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), } return (wf, outputs) @@ -2284,16 +2441,12 @@ def acpc_align_brain_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def non_local_means_T2(wf, cfg, strat_pool, pipe_num, opt=None): + denoise = pe.Node(interface=ants.DenoiseImage(), name=f"anat_denoise_T2_{pipe_num}") - denoise = pe.Node(interface=ants.DenoiseImage(), - name=f'anat_denoise_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, denoise, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, denoise, "input_image") - outputs = { - 'desc-preproc_T2w': (denoise, 'output_image') - } + outputs = {"desc-preproc_T2w": (denoise, "output_image")} return (wf, outputs) @@ -2308,18 +2461,17 @@ def non_local_means_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def n4_bias_correction_T2(wf, cfg, strat_pool, pipe_num, opt=None): + n4 = pe.Node( + interface=ants.N4BiasFieldCorrection( + dimension=3, shrink_factor=2, copy_header=True + ), + name=f"anat_n4_T2_{pipe_num}", + ) - n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3, - shrink_factor=2, - copy_header=True), - name=f'anat_n4_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, n4, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, n4, "input_image") - outputs = { - 'desc-preproc_T2w': (n4, 'output_image') - } + outputs = {"desc-preproc_T2w": (n4, "output_image")} return (wf, outputs) @@ -2347,13 +2499,9 @@ def brain_mask_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2367,7 +2515,6 @@ def brain_mask_acpc_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) 
return (wf, outputs) @@ -2382,13 +2529,9 @@ def brain_mask_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2402,9 +2545,7 @@ def brain_mask_acpc_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -2418,14 +2559,11 @@ def brain_mask_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = niworkflows_ants_brain_connector( + wf, cfg, strat_pool, pipe_num, opt + ) - wf, wf_outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) - - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2439,7 +2577,6 @@ def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None) outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -2454,13 +2591,9 @@ def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2480,30 +2613,36 @@ def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask_T2 = mask_T2(wf_name=f"brain_mask_T2_{pipe_num}") - brain_mask_T2 = mask_T2(wf_name=f'brain_mask_T2_{pipe_num}') - - if not cfg.anatomical_preproc['acpc_alignment']['run']: - node, out = strat_pool.get_data(['desc-reorient_T1w','T1w','desc-preproc_T1w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + if not cfg.anatomical_preproc["acpc_alignment"]["run"]: + node, out = strat_pool.get_data( + ["desc-reorient_T1w", "T1w", "desc-preproc_T1w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data(['desc-reorient_T2w', 'T2w', 'desc-preproc_T2w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') + node, out = strat_pool.get_data( + ["desc-reorient_T2w", "T2w", "desc-preproc_T2w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") else: - node, out = strat_pool.get_data(['desc-preproc_T1w','desc-reorient_T1w','T1w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"] + ) + 
wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data(['desc-preproc_T2w','desc-reorient_T2w', 'T2w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') + node, out = strat_pool.get_data( + ["desc-preproc_T2w", "desc-reorient_T2w", "T2w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") - node, out = strat_pool.get_data(["space-T1w_desc-brain_mask", - "space-T1w_desc-acpcbrain_mask"]) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w_mask') - - outputs = { - 'space-T2w_desc-brain_mask': (brain_mask_T2, 'outputspec.T2w_mask') - } + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w_mask") + + outputs = {"space-T2w_desc-brain_mask": (brain_mask_T2, "outputspec.T2w_mask")} return (wf, outputs) @@ -2520,21 +2659,20 @@ def brain_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_T2(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask_T2 = mask_T2(wf_name=f"brain_mask_acpc_T2_{pipe_num}") - brain_mask_T2 = mask_T2(wf_name=f'brain_mask_acpc_T2_{pipe_num}') + node, out = strat_pool.get_data("desc-reorient_T1w") + wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data('desc-reorient_T1w') - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-reorient_T2w") + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") - node, out = strat_pool.get_data('desc-reorient_T2w') - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') - - node, out = strat_pool.get_data(["space-T1w_desc-acpcbrain_mask", "space-T1w_desc-prebrain_mask"]) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w_mask') + node, out = strat_pool.get_data( + ["space-T1w_desc-acpcbrain_mask", "space-T1w_desc-prebrain_mask"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w_mask") - outputs = { - 'space-T2w_desc-acpcbrain_mask': (brain_mask_T2, 'outputspec.T2w_mask') - } + outputs = {"space-T2w_desc-acpcbrain_mask": (brain_mask_T2, "outputspec.T2w_mask")} return (wf, outputs) @@ -2553,27 +2691,26 @@ def brain_mask_acpc_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-brain_T2w"], ) def brain_extraction_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - if cfg.anatomical_preproc['acpc_alignment']['run'] and cfg.anatomical_preproc['acpc_alignment']['acpc_target'] == 'brain': - outputs = { - 'desc-brain_T2w': (strat_pool.get_data(["desc-acpcbrain_T2w"])) - } + if ( + cfg.anatomical_preproc["acpc_alignment"]["run"] + and cfg.anatomical_preproc["acpc_alignment"]["acpc_target"] == "brain" + ): + outputs = {"desc-brain_T2w": (strat_pool.get_data(["desc-acpcbrain_T2w"]))} else: - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_T2_{pipe_num}') + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_T2_{pipe_num}" + ) - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T2w_desc-brain_mask']) - wf.connect(node, out, 
anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data(["space-T2w_desc-brain_mask"]) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") - outputs = { - 'desc-brain_T2w': (anat_skullstrip_orig_vol, 'out_file') - } + outputs = {"desc-brain_T2w": (anat_skullstrip_orig_vol, "out_file")} return (wf, outputs) @@ -2591,26 +2728,26 @@ def brain_extraction_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-tempbrain_T2w"], ) def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None): + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_temp_T2_{pipe_num}" + ) - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_temp_T2_{pipe_num}') - - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T2w_desc-brain_mask', - 'space-T2w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T2w_desc-brain_mask", "space-T2w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") - outputs = { - 'desc-tempbrain_T2w': (anat_skullstrip_orig_vol, 'out_file') - } + outputs = {"desc-tempbrain_T2w": (anat_skullstrip_orig_vol, "out_file")} return (wf, outputs) + @nodeblock( name="freesurfer_abcd_preproc", config=["surface_analysis", "abcd_prefreesurfer_prep"], @@ -2650,105 +2787,130 @@ def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None): ) def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fnirt-based brain extraction - brain_extraction = fnirt_based_brain_extraction(config=cfg, - wf_name=f'fnirt_based_brain_extraction_{pipe_num}') + brain_extraction = fnirt_based_brain_extraction( + config=cfg, wf_name=f"fnirt_based_brain_extraction_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, brain_extraction, 'inputspec.anat_data') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, brain_extraction, "inputspec.anat_data") - node, out = strat_pool.get_data('template-ref-mask-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template-ref-mask-res-2') + node, out = strat_pool.get_data("template-ref-mask-res-2") + wf.connect(node, out, brain_extraction, "inputspec.template-ref-mask-res-2") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, brain_extraction, "inputspec.template_skull_for_anat") - node, out = strat_pool.get_data('T1w-template-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat_2mm') + node, out = strat_pool.get_data("T1w-template-res-2") + wf.connect(node, out, brain_extraction, "inputspec.template_skull_for_anat_2mm") - node, out = strat_pool.get_data('T1w-brain-template-mask') - wf.connect(node, out, brain_extraction, 'inputspec.template_brain_mask_for_anat') + node, out = strat_pool.get_data("T1w-brain-template-mask") + wf.connect(node, out, brain_extraction, 
"inputspec.template_brain_mask_for_anat") # fast bias field correction - fast_correction = fast_bias_field_correction(config=cfg, - wf_name=f'fast_bias_field_correction_{pipe_num}') + fast_correction = fast_bias_field_correction( + config=cfg, wf_name=f"fast_bias_field_correction_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, fast_correction, 'inputspec.anat_data') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, fast_correction, "inputspec.anat_data") - wf.connect(brain_extraction, 'outputspec.anat_brain', fast_correction, 'inputspec.anat_brain') + wf.connect( + brain_extraction, + "outputspec.anat_brain", + fast_correction, + "inputspec.anat_brain", + ) - wf.connect(brain_extraction, 'outputspec.anat_brain_mask', fast_correction, 'inputspec.anat_brain_mask') + wf.connect( + brain_extraction, + "outputspec.anat_brain_mask", + fast_correction, + "inputspec.anat_brain_mask", + ) ### ABCD Harmonization ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/FreeSurfer/FreeSurferPipeline.sh#L140-L144 # flirt -interp spline -in "$T1wImage" -ref "$T1wImage" -applyisoxfm 1 -out "$T1wImageFile"_1mm.nii.gz - resample_head_1mm = pe.Node(interface=fsl.FLIRT(), - name=f'resample_anat_head_1mm_{pipe_num}') - resample_head_1mm.inputs.interp = 'spline' + resample_head_1mm = pe.Node( + interface=fsl.FLIRT(), name=f"resample_anat_head_1mm_{pipe_num}" + ) + resample_head_1mm.inputs.interp = "spline" resample_head_1mm.inputs.apply_isoxfm = 1 - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, resample_head_1mm, 'in_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, resample_head_1mm, "in_file") - wf.connect(node, out, resample_head_1mm, 'reference') + wf.connect(node, out, resample_head_1mm, "reference") # applywarp --rel --interp=spline -i "$T1wImage" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageFile"_1mm.nii.gz - applywarp_head_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_head_to_head_1mm_{pipe_num}') + applywarp_head_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_head_to_head_1mm_{pipe_num}" + ) applywarp_head_to_head_1mm.inputs.relwarp = True - applywarp_head_to_head_1mm.inputs.interp = 'spline' - applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_head_to_head_1mm.inputs.interp = "spline" + applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(node, out, applywarp_head_to_head_1mm, 'in_file') + wf.connect(node, out, applywarp_head_to_head_1mm, "in_file") - wf.connect(resample_head_1mm, 'out_file', - applywarp_head_to_head_1mm, 'ref_file') + wf.connect(resample_head_1mm, "out_file", applywarp_head_to_head_1mm, "ref_file") # applywarp --rel --interp=nn -i "$T1wImageBrain" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageBrainFile"_1mm.nii.gz - applywarp_brain_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_brain_to_head_1mm_{pipe_num}') + applywarp_brain_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_brain_to_head_1mm_{pipe_num}" + ) applywarp_brain_to_head_1mm.inputs.relwarp = True - applywarp_brain_to_head_1mm.inputs.interp = 'nn' - applywarp_brain_to_head_1mm.inputs.premat = 
cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - wf.connect(fast_correction, 'outputspec.anat_brain_restore', - applywarp_brain_to_head_1mm, 'in_file') + applywarp_brain_to_head_1mm.inputs.interp = "nn" + applywarp_brain_to_head_1mm.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] + + wf.connect( + fast_correction, + "outputspec.anat_brain_restore", + applywarp_brain_to_head_1mm, + "in_file", + ) - wf.connect(resample_head_1mm, 'out_file', - applywarp_brain_to_head_1mm, 'ref_file') + wf.connect(resample_head_1mm, "out_file", applywarp_brain_to_head_1mm, "ref_file") # fslstats $T1wImageBrain -M - average_brain = pe.Node(interface=fsl.ImageStats(), - name=f'average_brain_{pipe_num}') - average_brain.inputs.op_string = '-M' - average_brain.inputs.output_type = 'NIFTI_GZ' + average_brain = pe.Node( + interface=fsl.ImageStats(), name=f"average_brain_{pipe_num}" + ) + average_brain.inputs.op_string = "-M" + average_brain.inputs.output_type = "NIFTI_GZ" - wf.connect(fast_correction, 'outputspec.anat_brain_restore', - average_brain, 'in_file') + wf.connect( + fast_correction, "outputspec.anat_brain_restore", average_brain, "in_file" + ) # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz - normalize_head = pe.Node(util.Function(input_names=['in_file', 'number', 'out_file_suffix'], - output_names=['out_file'], - function=fslmaths_command), - name=f'normalize_head_{pipe_num}') - normalize_head.inputs.out_file_suffix = '_norm' + normalize_head = pe.Node( + util.Function( + input_names=["in_file", "number", "out_file_suffix"], + output_names=["out_file"], + function=fslmaths_command, + ), + name=f"normalize_head_{pipe_num}", + ) + normalize_head.inputs.out_file_suffix = "_norm" - wf.connect(applywarp_head_to_head_1mm, 'out_file', - normalize_head, 'in_file') + wf.connect(applywarp_head_to_head_1mm, "out_file", normalize_head, "in_file") - wf.connect(average_brain, 'out_stat', - normalize_head, 'number') + wf.connect(average_brain, "out_stat", normalize_head, "number") outputs = { - 'desc-restore_T1w': (fast_correction, 'outputspec.anat_restore'), - 'desc-restore-brain_T1w': (fast_correction, - 'outputspec.anat_brain_restore'), - 'pipeline-fs_desc-fast_biasfield': (fast_correction, 'outputspec.bias_field'), - 'desc-ABCDpreproc_T1w': (normalize_head, 'out_file') - } + "desc-restore_T1w": (fast_correction, "outputspec.anat_restore"), + "desc-restore-brain_T1w": (fast_correction, "outputspec.anat_brain_restore"), + "pipeline-fs_desc-fast_biasfield": (fast_correction, "outputspec.bias_field"), + "desc-ABCDpreproc_T1w": (normalize_head, "out_file"), + } return (wf, outputs) + @nodeblock( name="freesurfer_reconall", config=["surface_analysis", "freesurfer"], @@ -2761,49 +2923,47 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): "pipeline-fs_brainmask", "pipeline-fs_wmparc", "pipeline-fs_T1", - *freesurfer_abcd_preproc.outputs + *freesurfer_abcd_preproc.outputs, # we're grabbing the postproc outputs and appending them to # the reconall outputs ], ) def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): - - reconall = pe.Node(interface=freesurfer.ReconAll(), - name=f'anat_freesurfer_{pipe_num}', - mem_gb=2.7) + reconall = pe.Node( + interface=freesurfer.ReconAll(), name=f"anat_freesurfer_{pipe_num}", mem_gb=2.7 + ) reconall.skip_timeout = True # this Node could take > 24 hours freesurfer_subject_dir = 
os.path.join( - cfg.pipeline_setup['working_directory']['path'], - 'cpac_'+cfg['subject_id'], - f'anat_preproc_freesurfer_{pipe_num}', - 'anat_freesurfer') + cfg.pipeline_setup["working_directory"]["path"], + "cpac_" + cfg["subject_id"], + f"anat_preproc_freesurfer_{pipe_num}", + "anat_freesurfer", + ) if not os.path.exists(freesurfer_subject_dir): os.makedirs(freesurfer_subject_dir) - reconall.inputs.directive = 'all' + reconall.inputs.directive = "all" reconall.inputs.subjects_dir = freesurfer_subject_dir - reconall.inputs.openmp = cfg.pipeline_setup['system_config'][ - 'num_OMP_threads'] + reconall.inputs.openmp = cfg.pipeline_setup["system_config"]["num_OMP_threads"] - if cfg.surface_analysis['freesurfer']['reconall_args'] is not None: - reconall.inputs.args = cfg.surface_analysis['freesurfer'][ - 'reconall_args'] + if cfg.surface_analysis["freesurfer"]["reconall_args"] is not None: + reconall.inputs.args = cfg.surface_analysis["freesurfer"]["reconall_args"] - node, out = strat_pool.get_data(["desc-ABCDpreproc_T1w","desc-preproc_T1w"]) - wf.connect(node, out, reconall, 'T1_files') + node, out = strat_pool.get_data(["desc-ABCDpreproc_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, reconall, "T1_files") wf, hemisphere_outputs = freesurfer_hemispheres(wf, reconall, pipe_num) outputs = { - 'freesurfer-subject-dir': (reconall, 'subjects_dir'), + "freesurfer-subject-dir": (reconall, "subjects_dir"), **hemisphere_outputs, - 'pipeline-fs_raw-average': (reconall, 'rawavg'), - 'pipeline-fs_subcortical-seg': (reconall, 'aseg'), - 'pipeline-fs_brainmask': (reconall, 'brainmask'), - 'pipeline-fs_wmparc': (reconall, 'wmparc'), - 'pipeline-fs_T1': (reconall, 'T1') + "pipeline-fs_raw-average": (reconall, "rawavg"), + "pipeline-fs_subcortical-seg": (reconall, "aseg"), + "pipeline-fs_brainmask": (reconall, "brainmask"), + "pipeline-fs_wmparc": (reconall, "wmparc"), + "pipeline-fs_T1": (reconall, "T1"), } # for label, connection in outputs.items(): @@ -2818,39 +2978,41 @@ def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): return wf, outputs -def fnirt_based_brain_extraction(config=None, - wf_name='fnirt_based_brain_extraction'): - +def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extraction"): ### ABCD Harmonization - FNIRT-based brain extraction ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_data', - 'template-ref-mask-res-2', - 'template_skull_for_anat', - 'template_skull_for_anat_2mm', - 'template_brain_mask_for_anat']), - name='inputspec') - - outputnode = pe.Node(util.IdentityInterface(fields=['anat_brain', - 'anat_brain_mask']), - name='outputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=[ + "anat_data", + "template-ref-mask-res-2", + "template_skull_for_anat", + "template_skull_for_anat_2mm", + "template_brain_mask_for_anat", + ] + ), + name="inputspec", + ) + + outputnode = pe.Node( + util.IdentityInterface(fields=["anat_brain", "anat_brain_mask"]), + name="outputspec", + ) # Register to 2mm reference image (linear then non-linear) # linear registration to 2mm reference # flirt -interp spline -dof 12 -in "$Input" -ref "$Reference2mm" -omat "$WD"/roughlin.mat -out "$WD"/"$BaseName"_to_MNI_roughlin.nii.gz -nosearch - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_reg') + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg") linear_reg.inputs.dof = 12 - 
linear_reg.inputs.interp = 'spline' + linear_reg.inputs.interp = "spline" linear_reg.inputs.no_search = True - preproc.connect(inputnode, 'anat_data', - linear_reg, 'in_file') + preproc.connect(inputnode, "anat_data", linear_reg, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - linear_reg, 'reference') + preproc.connect(inputnode, "template_skull_for_anat_2mm", linear_reg, "reference") # non-linear registration to 2mm reference # fnirt --in="$Input" --ref="$Reference2mm" --aff="$WD"/roughlin.mat --refmask="$Reference2mmMask" \ @@ -2858,172 +3020,152 @@ def fnirt_based_brain_extraction(config=None, # --refout="$WD"/IntensityModulatedT1.nii.gz --iout="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz \ # --logout="$WD"/NonlinearReg.txt --intout="$WD"/NonlinearIntensities.nii.gz \ # --cout="$WD"/NonlinearReg.nii.gz --config="$FNIRTConfig" - non_linear_reg = pe.Node(interface=fsl.FNIRT(), - name='non_linear_reg') + non_linear_reg = pe.Node(interface=fsl.FNIRT(), name="non_linear_reg") - non_linear_reg.inputs.field_file = True # --fout - non_linear_reg.inputs.jacobian_file = True # --jout - non_linear_reg.inputs.modulatedref_file = True # --refout + non_linear_reg.inputs.field_file = True # --fout + non_linear_reg.inputs.jacobian_file = True # --jout + non_linear_reg.inputs.modulatedref_file = True # --refout # non_linear_reg.inputs.warped_file = 'T1w_acpc_to_MNI_nonlin.nii.gz' # --iout # non_linear_reg.inputs.log_file = 'NonlinearReg.txt' # --logout - non_linear_reg.inputs.out_intensitymap_file = True # --intout - non_linear_reg.inputs.fieldcoeff_file = True # --cout + non_linear_reg.inputs.out_intensitymap_file = True # --intout + non_linear_reg.inputs.fieldcoeff_file = True # --cout non_linear_reg.inputs.config_file = config.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - preproc.connect(inputnode, 'anat_data', - non_linear_reg, 'in_file') + preproc.connect(inputnode, "anat_data", non_linear_reg, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - non_linear_reg, 'ref_file') + preproc.connect( + inputnode, "template_skull_for_anat_2mm", non_linear_reg, "ref_file" + ) - preproc.connect(linear_reg, 'out_matrix_file', - non_linear_reg, 'affine_file') + preproc.connect(linear_reg, "out_matrix_file", non_linear_reg, "affine_file") - preproc.connect(inputnode, 'template-ref-mask-res-2', - non_linear_reg, 'refmask_file') + preproc.connect( + inputnode, "template-ref-mask-res-2", non_linear_reg, "refmask_file" + ) # Overwrite the image output from FNIRT with a spline interpolated highres version # creating spline interpolated hires version # applywarp --rel --interp=spline --in="$Input" --ref="$Reference" -w "$WD"/str2standard.nii.gz --out="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz - apply_warp = pe.Node(interface=fsl.ApplyWarp(), - name='apply_warp') + apply_warp = pe.Node(interface=fsl.ApplyWarp(), name="apply_warp") - apply_warp.inputs.interp = 'spline' + apply_warp.inputs.interp = "spline" apply_warp.inputs.relwarp = True - preproc.connect(inputnode, 'anat_data', - apply_warp, 'in_file') + preproc.connect(inputnode, "anat_data", apply_warp, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat', - apply_warp, 'ref_file') + preproc.connect(inputnode, "template_skull_for_anat", apply_warp, "ref_file") - preproc.connect(non_linear_reg, 'field_file', - apply_warp, 'field_file') + preproc.connect(non_linear_reg, "field_file", 
apply_warp, "field_file") # Invert warp and transform dilated brain mask back into native space, and use it to mask input image # Input and reference spaces are the same, using 2mm reference to save time # invwarp --ref="$Reference2mm" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz - inverse_warp = pe.Node(interface=fsl.InvWarp(), name='inverse_warp') - inverse_warp.inputs.output_type = 'NIFTI_GZ' + inverse_warp = pe.Node(interface=fsl.InvWarp(), name="inverse_warp") + inverse_warp.inputs.output_type = "NIFTI_GZ" - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - inverse_warp, 'reference') + preproc.connect(inputnode, "template_skull_for_anat_2mm", inverse_warp, "reference") - preproc.connect(non_linear_reg, 'field_file', - inverse_warp, 'warp') + preproc.connect(non_linear_reg, "field_file", inverse_warp, "warp") # Apply inverse warp # applywarp --rel --interp=nn --in="$ReferenceMask" --ref="$Input" -w "$WD"/standard2str.nii.gz -o "$OutputBrainMask" - apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(), - name='apply_inv_warp') - apply_inv_warp.inputs.interp = 'nn' + apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(), name="apply_inv_warp") + apply_inv_warp.inputs.interp = "nn" apply_inv_warp.inputs.relwarp = True - preproc.connect(inputnode, 'template_brain_mask_for_anat', - apply_inv_warp, 'in_file') + preproc.connect( + inputnode, "template_brain_mask_for_anat", apply_inv_warp, "in_file" + ) + + preproc.connect(inputnode, "anat_data", apply_inv_warp, "ref_file") - preproc.connect(inputnode, 'anat_data', - apply_inv_warp, 'ref_file') + preproc.connect(inverse_warp, "inverse_warp", apply_inv_warp, "field_file") - preproc.connect(inverse_warp, 'inverse_warp', - apply_inv_warp, 'field_file') + preproc.connect(apply_inv_warp, "out_file", outputnode, "anat_brain_mask") - preproc.connect(apply_inv_warp, 'out_file', - outputnode, 'anat_brain_mask') - # Apply mask to create brain # fslmaths "$Input" -mas "$OutputBrainMask" "$OutputBrainExtractedImage" - apply_mask = pe.Node(interface=fsl.MultiImageMaths(), - name='apply_mask') - apply_mask.inputs.op_string = '-mas %s' + apply_mask = pe.Node(interface=fsl.MultiImageMaths(), name="apply_mask") + apply_mask.inputs.op_string = "-mas %s" - preproc.connect(inputnode, 'anat_data', - apply_mask, 'in_file') + preproc.connect(inputnode, "anat_data", apply_mask, "in_file") - preproc.connect(apply_inv_warp, 'out_file', - apply_mask, 'operand_files') + preproc.connect(apply_inv_warp, "out_file", apply_mask, "operand_files") - preproc.connect(apply_mask, 'out_file', - outputnode, 'anat_brain') - - return preproc + preproc.connect(apply_mask, "out_file", outputnode, "anat_brain") + return preproc -def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction'): +def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction"): ### ABCD Harmonization - FAST bias field correction ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_data', - 'anat_brain', - 'anat_brain_mask']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["anat_data", "anat_brain", "anat_brain_mask"]), + name="inputspec", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['anat_restore', - 'anat_brain_restore', - 'bias_field']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=["anat_restore", "anat_brain_restore", 
"bias_field"] + ), + name="outputspec", + ) # fast -b -B -o ${T1wFolder}/T1w_fast -t 1 ${T1wFolder}/T1w_acpc_dc_brain.nii.gz - fast_bias_field_correction = pe.Node(interface=fsl.FAST(), - name='fast_bias_field_correction') + fast_bias_field_correction = pe.Node( + interface=fsl.FAST(), name="fast_bias_field_correction" + ) fast_bias_field_correction.inputs.img_type = 1 fast_bias_field_correction.inputs.output_biasfield = True fast_bias_field_correction.inputs.output_biascorrected = True - preproc.connect(inputnode, 'anat_brain', - fast_bias_field_correction, 'in_files') + preproc.connect(inputnode, "anat_brain", fast_bias_field_correction, "in_files") - preproc.connect(fast_bias_field_correction, 'restored_image', - outputnode, 'anat_brain_restore') + preproc.connect( + fast_bias_field_correction, "restored_image", outputnode, "anat_brain_restore" + ) - preproc.connect(fast_bias_field_correction, 'bias_field', - outputnode, 'bias_field') + preproc.connect(fast_bias_field_correction, "bias_field", outputnode, "bias_field") - # FAST does not output a non-brain extracted image so create an inverse mask, - # apply it to T1w_acpc_dc.nii.gz, insert the T1w_fast_restore to the skull of + # FAST does not output a non-brain extracted image so create an inverse mask, + # apply it to T1w_acpc_dc.nii.gz, insert the T1w_fast_restore to the skull of # the T1w_acpc_dc.nii.gz and use that for the T1w_acpc_dc_restore head # fslmaths ${T1wFolder}/T1w_acpc_brain_mask.nii.gz -mul -1 -add 1 ${T1wFolder}/T1w_acpc_inverse_brain_mask.nii.gz - inverse_brain_mask = pe.Node(interface=fsl.ImageMaths(), - name='inverse_brain_mask') - inverse_brain_mask.inputs.op_string = '-mul -1 -add 1' + inverse_brain_mask = pe.Node(interface=fsl.ImageMaths(), name="inverse_brain_mask") + inverse_brain_mask.inputs.op_string = "-mul -1 -add 1" - preproc.connect(inputnode, 'anat_brain_mask', - inverse_brain_mask, 'in_file') + preproc.connect(inputnode, "anat_brain_mask", inverse_brain_mask, "in_file") # fslmaths ${T1wFolder}/T1w_acpc_dc.nii.gz -mul ${T1wFolder}/T1w_acpc_inverse_brain_mask.nii.gz ${T1wFolder}/T1w_acpc_dc_skull.nii.gz - apply_mask = pe.Node(interface=fsl.MultiImageMaths(), - name='apply_mask') - apply_mask.inputs.op_string = '-mul %s' + apply_mask = pe.Node(interface=fsl.MultiImageMaths(), name="apply_mask") + apply_mask.inputs.op_string = "-mul %s" - preproc.connect(inputnode, 'anat_data', - apply_mask, 'in_file') + preproc.connect(inputnode, "anat_data", apply_mask, "in_file") - preproc.connect(inverse_brain_mask, 'out_file', - apply_mask, 'operand_files') + preproc.connect(inverse_brain_mask, "out_file", apply_mask, "operand_files") # fslmaths ${T1wFolder}/T1w_fast_restore.nii.gz -add ${T1wFolder}/T1w_acpc_dc_skull.nii.gz ${T1wFolder}/${T1wImage}_acpc_dc_restore - anat_restore = pe.Node(interface=fsl.MultiImageMaths(), - name='get_anat_restore') - anat_restore.inputs.op_string = '-add %s' + anat_restore = pe.Node(interface=fsl.MultiImageMaths(), name="get_anat_restore") + anat_restore.inputs.op_string = "-add %s" - preproc.connect(fast_bias_field_correction, 'restored_image', - anat_restore, 'in_file') + preproc.connect( + fast_bias_field_correction, "restored_image", anat_restore, "in_file" + ) - preproc.connect(apply_mask, 'out_file', - anat_restore, 'operand_files') + preproc.connect(apply_mask, "out_file", anat_restore, "operand_files") - preproc.connect(anat_restore, 'out_file', - outputnode, 'anat_restore') + preproc.connect(anat_restore, "out_file", outputnode, "anat_restore") return preproc - @nodeblock( 
name="correct_restore_brain_intensity_abcd", config=["anatomical_preproc", "brain_extraction"], @@ -3042,127 +3184,120 @@ def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction' ], outputs=["desc-restore-brain_T1w"], ) -def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, - opt=None): - +def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ### ABCD Harmonization - Myelin Map ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 # fslmerge -t ${T1wFolder}/xfms/${T1wImage}_dc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc - merge_t1_acpc_to_list = pe.Node(util.Merge(3), - name=f'merge_t1_acpc_to_list_{pipe_num}') + merge_t1_acpc_to_list = pe.Node( + util.Merge(3), name=f"merge_t1_acpc_to_list_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, merge_t1_acpc_to_list, 'in1') - wf.connect(node, out, merge_t1_acpc_to_list, 'in2') - wf.connect(node, out, merge_t1_acpc_to_list, 'in3') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, merge_t1_acpc_to_list, "in1") + wf.connect(node, out, merge_t1_acpc_to_list, "in2") + wf.connect(node, out, merge_t1_acpc_to_list, "in3") - merge_t1_acpc = pe.Node(interface=fslMerge(), - name=f'merge_t1_acpc_{pipe_num}') + merge_t1_acpc = pe.Node(interface=fslMerge(), name=f"merge_t1_acpc_{pipe_num}") - merge_t1_acpc.inputs.dimension = 't' + merge_t1_acpc.inputs.dimension = "t" - wf.connect(merge_t1_acpc_to_list, 'out', - merge_t1_acpc, 'in_files') + wf.connect(merge_t1_acpc_to_list, "out", merge_t1_acpc, "in_files") # fslmaths ${T1wFolder}/xfms/${T1wImage}_dc -mul 0 ${T1wFolder}/xfms/${T1wImage}_dc - multiply_t1_acpc_by_zero = pe.Node(interface=fsl.ImageMaths(), - name=f'multiply_t1_acpc_by_zero_{pipe_num}') - - multiply_t1_acpc_by_zero.inputs.op_string = '-mul 0' + multiply_t1_acpc_by_zero = pe.Node( + interface=fsl.ImageMaths(), name=f"multiply_t1_acpc_by_zero_{pipe_num}" + ) - wf.connect(merge_t1_acpc, 'merged_file', - multiply_t1_acpc_by_zero, 'in_file') + multiply_t1_acpc_by_zero.inputs.op_string = "-mul 0" + + wf.connect(merge_t1_acpc, "merged_file", multiply_t1_acpc_by_zero, "in_file") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 # convertwarp --relout --rel --ref="$T1wFolder"/"$T1wImageBrainMask" --premat="$T1wFolder"/xfms/"$InitialT1wTransform" \ # --warp1="$T1wFolder"/xfms/"$dcT1wTransform" --out="$T1wFolder"/xfms/"$OutputOrigT1wToT1w" - convertwarp_orig_t1_to_t1 = pe.Node(interface=fsl.ConvertWarp(), - name=f'convertwarp_orig_t1_to_t1_{pipe_num}') + convertwarp_orig_t1_to_t1 = pe.Node( + interface=fsl.ConvertWarp(), name=f"convertwarp_orig_t1_to_t1_{pipe_num}" + ) convertwarp_orig_t1_to_t1.inputs.out_relwarp = True convertwarp_orig_t1_to_t1.inputs.relwarp = True - + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, convertwarp_orig_t1_to_t1, 'reference') + wf.connect(node, out, convertwarp_orig_t1_to_t1, "reference") - node, out = strat_pool.get_data('from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') - wf.connect(node, out, convertwarp_orig_t1_to_t1, 'premat') - wf.connect(multiply_t1_acpc_by_zero, 'out_file', - convertwarp_orig_t1_to_t1, 'warp1') + node, out = strat_pool.get_data("from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm") + wf.connect(node, out, convertwarp_orig_t1_to_t1, "premat") + wf.connect(multiply_t1_acpc_by_zero, 
"out_file", convertwarp_orig_t1_to_t1, "warp1") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" - applywarp_biasfield = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_biasfield_{pipe_num}') + applywarp_biasfield = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_biasfield_{pipe_num}" + ) applywarp_biasfield.inputs.relwarp = True - applywarp_biasfield.inputs.interp = 'spline' + applywarp_biasfield.inputs.interp = "spline" - node, out = strat_pool.get_data('pipeline-fs_desc-fast_biasfield') - wf.connect(node, out, applywarp_biasfield, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_desc-fast_biasfield") + wf.connect(node, out, applywarp_biasfield, "in_file") node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, applywarp_biasfield, 'ref_file') + wf.connect(node, out, applywarp_biasfield, "ref_file") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, applywarp_biasfield, 'field_file') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, applywarp_biasfield, "field_file") # fslmaths "$BiasFieldOutput" -thr 0.1 "$BiasFieldOutput" - threshold_biasfield = pe.Node(interface=fsl.ImageMaths(), - name=f'threshold_biasfield_{pipe_num}') + threshold_biasfield = pe.Node( + interface=fsl.ImageMaths(), name=f"threshold_biasfield_{pipe_num}" + ) - threshold_biasfield.inputs.op_string = '-thr 0.1' - wf.connect(applywarp_biasfield, 'out_file', - threshold_biasfield, 'in_file') + threshold_biasfield.inputs.op_string = "-thr 0.1" + wf.connect(applywarp_biasfield, "out_file", threshold_biasfield, "in_file") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" - applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_t1_{pipe_num}') - + applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f"applywarp_t1_{pipe_num}") + applywarp_t1.inputs.relwarp = True - applywarp_t1.inputs.interp = 'spline' - - node, out = strat_pool.get_data('desc-n4_T1w') - wf.connect(node, out, applywarp_t1, 'in_file') - + applywarp_t1.inputs.interp = "spline" + + node, out = strat_pool.get_data("desc-n4_T1w") + wf.connect(node, out, applywarp_t1, "in_file") + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, applywarp_t1, 'ref_file') - - wf.connect(convertwarp_orig_t1_to_t1, 'out_file', - applywarp_t1, 'field_file') + wf.connect(node, out, applywarp_t1, "ref_file") + + wf.connect(convertwarp_orig_t1_to_t1, "out_file", applywarp_t1, "field_file") # fslmaths "$OutputT1wImage" -abs "$OutputT1wImage" -odt float - abs_t1 = pe.Node(interface=fsl.ImageMaths(), - name=f'abs_t1_{pipe_num}') + abs_t1 = pe.Node(interface=fsl.ImageMaths(), name=f"abs_t1_{pipe_num}") - abs_t1.inputs.op_string = '-abs' - wf.connect(applywarp_t1, 'out_file', abs_t1, 'in_file') + abs_t1.inputs.op_string = "-abs" + wf.connect(applywarp_t1, "out_file", abs_t1, "in_file") # fslmaths "$OutputT1wImage" -div "$BiasField" "$OutputT1wImageRestore" - div_t1_by_biasfield = pe.Node(interface=fsl.ImageMaths(), - name=f'div_t1_by_biasfield_{pipe_num}') + div_t1_by_biasfield = pe.Node( + interface=fsl.ImageMaths(), name=f"div_t1_by_biasfield_{pipe_num}" 
+ )

- div_t1_by_biasfield.inputs.op_string = '-div'
+ div_t1_by_biasfield.inputs.op_string = "-div"

- wf.connect(abs_t1, 'out_file', div_t1_by_biasfield, 'in_file')
+ wf.connect(abs_t1, "out_file", div_t1_by_biasfield, "in_file")

- node, out = strat_pool.get_data('pipeline-fs_desc-fast_biasfield')
- wf.connect(node, out, div_t1_by_biasfield, 'in_file2')
+ node, out = strat_pool.get_data("pipeline-fs_desc-fast_biasfield")
+ wf.connect(node, out, div_t1_by_biasfield, "in_file2")

 # fslmaths "$OutputT1wImageRestore" -mas "$T1wImageBrain" "$OutputT1wImageRestoreBrain"
- apply_mask = pe.Node(interface=fsl.maths.ApplyMask(),
- name=f'get_restored_corrected_brain_{pipe_num}')
+ apply_mask = pe.Node(
+ interface=fsl.maths.ApplyMask(), name=f"get_restored_corrected_brain_{pipe_num}"
+ )

- wf.connect(div_t1_by_biasfield, 'out_file',
- apply_mask, 'in_file')
+ wf.connect(div_t1_by_biasfield, "out_file", apply_mask, "in_file")

 node, out = strat_pool.get_data("space-T1w_desc-brain_mask")
- wf.connect(node, out, apply_mask, 'mask_file')
+ wf.connect(node, out, apply_mask, "mask_file")

- outputs = {
- 'desc-restore-brain_T1w': (apply_mask, 'out_file')
- }
+ outputs = {"desc-restore-brain_T1w": (apply_mask, "out_file")}

 return (wf, outputs)
-
diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py
index f6f12fd983..2ef58c3d2a 100644
--- a/CPAC/anat_preproc/lesion_preproc.py
+++ b/CPAC/anat_preproc/lesion_preproc.py
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 from nipype.interfaces import afni
-from CPAC.pipeline import nipype_pipeline_engine as pe
 import nipype.interfaces.utility as util

+from CPAC.pipeline import nipype_pipeline_engine as pe
+

 def inverse_lesion(lesion_path):
 """
@@ -21,15 +22,16 @@ def inverse_lesion(lesion_path):
 path to the output file, if the lesion does not require to be inverted
 it returns the unchanged lesion_path input
 """
- import shutil
- import os
 import ntpath
+ import os
+ import shutil

- import CPAC.utils.nifti_utils as nu
 import nibabel as nib

+ import CPAC.utils.nifti_utils as nu
+
 lesion_out = lesion_path
-
+
 if nu.more_zeros_than_ones(image=lesion_path):
 lesion_out = os.path.join(os.getcwd(), ntpath.basename(lesion_path))
 shutil.copyfile(lesion_path, lesion_out)
@@ -40,7 +42,7 @@ def inverse_lesion(lesion_path):
 return lesion_out


-def create_lesion_preproc(wf_name='lesion_preproc'):
+def create_lesion_preproc(wf_name="lesion_preproc"):
 """
 The main purpose of this workflow is to process lesion masks.
Lesion mask file is deobliqued and reoriented in the same way as the T1 in @@ -80,49 +82,45 @@ def create_lesion_preproc(wf_name='lesion_preproc'): >>> preproc.inputs.inputspec.lesion = 'sub1/anat/lesion-mask.nii.gz' >>> preproc.run() #doctest: +SKIP """ - preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['lesion']), name='inputspec') + inputnode = pe.Node(util.IdentityInterface(fields=["lesion"]), name="inputspec") - outputnode = pe.Node(util.IdentityInterface(fields=['refit', - 'reorient']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["refit", "reorient"]), name="outputspec" + ) - lesion_deoblique = pe.Node(interface=afni.Refit(), - name='lesion_deoblique') + lesion_deoblique = pe.Node(interface=afni.Refit(), name="lesion_deoblique") lesion_deoblique.inputs.deoblique = True - lesion_inverted = pe.Node(interface=util.Function( - input_names=['lesion_path'], - output_names=['lesion_out'], - function=inverse_lesion), - name='inverse_lesion') + lesion_inverted = pe.Node( + interface=util.Function( + input_names=["lesion_path"], + output_names=["lesion_out"], + function=inverse_lesion, + ), + name="inverse_lesion", + ) # We first check and invert the lesion if needed to be used by ANTs - preproc.connect( - inputnode, 'lesion', lesion_inverted, 'lesion_path') + preproc.connect(inputnode, "lesion", lesion_inverted, "lesion_path") - preproc.connect( - lesion_inverted, 'lesion_out', lesion_deoblique, 'in_file') + preproc.connect(lesion_inverted, "lesion_out", lesion_deoblique, "in_file") - preproc.connect( - lesion_deoblique, 'out_file', outputnode, 'refit') + preproc.connect(lesion_deoblique, "out_file", outputnode, "refit") # Anatomical reorientation - lesion_reorient = pe.Node(interface=afni.Resample(), - name='lesion_reorient', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - - lesion_reorient.inputs.orientation = 'RPI' - lesion_reorient.inputs.outputtype = 'NIFTI_GZ' - - preproc.connect( - lesion_deoblique, 'out_file', lesion_reorient, - 'in_file') - preproc.connect( - lesion_reorient, 'out_file', outputnode, 'reorient') + lesion_reorient = pe.Node( + interface=afni.Resample(), + name="lesion_reorient", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + + lesion_reorient.inputs.orientation = "RPI" + lesion_reorient.inputs.outputtype = "NIFTI_GZ" + + preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file") + preproc.connect(lesion_reorient, "out_file", outputnode, "reorient") return preproc diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index b2f9e49ae1..fef6a01024 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -15,18 +15,22 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
"""Functional preprocessing""" + # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position from nipype import logging from nipype.interfaces import afni, ants, fsl, utility as util -logger = logging.getLogger('nipype.workflow') -from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock -from nipype.interfaces.afni import preprocess -from nipype.interfaces.afni import utils as afni_utils + +logger = logging.getLogger("nipype.workflow") +from nipype.interfaces.afni import preprocess, utils as afni_utils from CPAC.func_preproc.utils import nullify -from CPAC.utils.interfaces.ants import AI # niworkflows -from CPAC.utils.interfaces.ants import PrintHeader, SetDirectionByMatrix +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces.ants import ( + AI, # niworkflows + PrintHeader, + SetDirectionByMatrix, +) from CPAC.utils.utils import add_afni_prefix @@ -35,222 +39,213 @@ def collect_arguments(*args): if args[0]: command_args += [args[1]] command_args += args[2:] - return ' '.join(command_args) + return " ".join(command_args) -def anat_refined_mask(init_bold_mask=True, wf_name='init_bold_mask'): - +def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf = pe.Workflow(name=wf_name) - input_node = pe.Node(util.IdentityInterface(fields=['func', - 'anatomical_brain_mask', - 'anat_brain', - 'init_func_brain_mask']), - name='inputspec') + input_node = pe.Node( + util.IdentityInterface( + fields=[ + "func", + "anatomical_brain_mask", + "anat_brain", + "init_func_brain_mask", + ] + ), + name="inputspec", + ) - output_node = pe.Node(util.IdentityInterface(fields=['func_brain_mask']), - name='outputspec') + output_node = pe.Node( + util.IdentityInterface(fields=["func_brain_mask"]), name="outputspec" + ) # 1 Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") # TODO add an option to select volume - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") - wf.connect(input_node, 'func', - func_single_volume, 'in_file_a') + wf.connect(input_node, "func", func_single_volume, "in_file_a") # 2 get temporary func brain - func_tmp_brain = pe.Node(interface=afni_utils.Calc(), - name='func_tmp_brain') - func_tmp_brain.inputs.expr = 'a*b' - func_tmp_brain.inputs.outputtype = 'NIFTI_GZ' + func_tmp_brain = pe.Node(interface=afni_utils.Calc(), name="func_tmp_brain") + func_tmp_brain.inputs.expr = "a*b" + func_tmp_brain.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_single_volume, 'out_file', - func_tmp_brain, 'in_file_a') + wf.connect(func_single_volume, "out_file", func_tmp_brain, "in_file_a") # 2.1 get a tmp func brain mask if init_bold_mask == True: # 2.1.1 N4BiasFieldCorrection single volume of raw_func func_single_volume_n4_corrected = pe.Node( - interface=ants.N4BiasFieldCorrection(dimension=3, - copy_header=True, - bspline_fitting_distance=200), + interface=ants.N4BiasFieldCorrection( + dimension=3, copy_header=True, bspline_fitting_distance=200 + ), shrink_factor=2, - name='func_single_volume_n4_corrected') - func_single_volume_n4_corrected.inputs.args = '-r True' + name="func_single_volume_n4_corrected", + ) + func_single_volume_n4_corrected.inputs.args = "-r True" - wf.connect(func_single_volume, 'out_file', 
- func_single_volume_n4_corrected, 'input_image') + wf.connect( + func_single_volume, + "out_file", + func_single_volume_n4_corrected, + "input_image", + ) # 2.1.2 bet n4 corrected image - generate tmp func brain mask - func_tmp_brain_mask = pe.Node(interface=fsl.BET(), - name='func_tmp_brain_mask_pre') + func_tmp_brain_mask = pe.Node( + interface=fsl.BET(), name="func_tmp_brain_mask_pre" + ) func_tmp_brain_mask.inputs.mask = True - wf.connect(func_single_volume_n4_corrected, 'output_image', - func_tmp_brain_mask, 'in_file') + wf.connect( + func_single_volume_n4_corrected, + "output_image", + func_tmp_brain_mask, + "in_file", + ) # 2.1.3 dilate func tmp brain mask - func_tmp_brain_mask_dil = pe.Node(interface=fsl.ImageMaths(), - name='func_tmp_brain_mask_dil') - func_tmp_brain_mask_dil.inputs.op_string = '-dilM' + func_tmp_brain_mask_dil = pe.Node( + interface=fsl.ImageMaths(), name="func_tmp_brain_mask_dil" + ) + func_tmp_brain_mask_dil.inputs.op_string = "-dilM" - wf.connect(func_tmp_brain_mask, 'mask_file', - func_tmp_brain_mask_dil, 'in_file') + wf.connect(func_tmp_brain_mask, "mask_file", func_tmp_brain_mask_dil, "in_file") - wf.connect(func_tmp_brain_mask_dil, 'out_file', - func_tmp_brain, 'in_file_b') + wf.connect(func_tmp_brain_mask_dil, "out_file", func_tmp_brain, "in_file_b") else: # 2.1.1 connect dilated init func brain mask - wf.connect(input_node, 'init_func_brain_mask', - func_tmp_brain, 'in_file_b') + wf.connect(input_node, "init_func_brain_mask", func_tmp_brain, "in_file_b") # 3. get transformation of anat to func # 3.1 Register func tmp brain to anat brain to get func2anat matrix - linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='func_to_anat_linear_reg') - linear_reg_func_to_anat.inputs.cost = 'mutualinfo' + linear_reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name="func_to_anat_linear_reg" + ) + linear_reg_func_to_anat.inputs.cost = "mutualinfo" linear_reg_func_to_anat.inputs.dof = 6 - wf.connect(func_tmp_brain, 'out_file', - linear_reg_func_to_anat, 'in_file') + wf.connect(func_tmp_brain, "out_file", linear_reg_func_to_anat, "in_file") - wf.connect(input_node, 'anat_brain', - linear_reg_func_to_anat, 'reference') + wf.connect(input_node, "anat_brain", linear_reg_func_to_anat, "reference") # 3.2 Inverse func to anat affine - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name='inv_func2anat_affine') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name="inv_func2anat_affine" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect( + linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file" + ) # 4. 
anat mask to func space # Transform anatomical mask to functional space to get BOLD mask - reg_anat_mask_to_func = pe.Node(interface=fsl.FLIRT(), - name='reg_anat_mask_to_func') + reg_anat_mask_to_func = pe.Node(interface=fsl.FLIRT(), name="reg_anat_mask_to_func") reg_anat_mask_to_func.inputs.apply_xfm = True - reg_anat_mask_to_func.inputs.cost = 'mutualinfo' + reg_anat_mask_to_func.inputs.cost = "mutualinfo" reg_anat_mask_to_func.inputs.dof = 6 - reg_anat_mask_to_func.inputs.interp = 'nearestneighbour' + reg_anat_mask_to_func.inputs.interp = "nearestneighbour" - wf.connect(input_node, 'anatomical_brain_mask', - reg_anat_mask_to_func, 'in_file') + wf.connect(input_node, "anatomical_brain_mask", reg_anat_mask_to_func, "in_file") - wf.connect(func_tmp_brain, 'out_file', - reg_anat_mask_to_func, 'reference') + wf.connect(func_tmp_brain, "out_file", reg_anat_mask_to_func, "reference") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_mask_to_func, 'in_matrix_file') + wf.connect( + inv_func_to_anat_affine, "out_file", reg_anat_mask_to_func, "in_matrix_file" + ) # 5. get final func mask: refine func tmp mask with anat_mask_in_func mask - func_mask = pe.Node(interface=fsl.MultiImageMaths(), name='func_mask') + func_mask = pe.Node(interface=fsl.MultiImageMaths(), name="func_mask") func_mask.inputs.op_string = "-mul %s" - wf.connect(reg_anat_mask_to_func, 'out_file', - func_mask, 'operand_files') + wf.connect(reg_anat_mask_to_func, "out_file", func_mask, "operand_files") if init_bold_mask == True: - wf.connect(func_tmp_brain_mask_dil, 'out_file', - func_mask, 'in_file') + wf.connect(func_tmp_brain_mask_dil, "out_file", func_mask, "in_file") else: - wf.connect(input_node, 'init_func_brain_mask', - func_mask, 'in_file') + wf.connect(input_node, "init_func_brain_mask", func_mask, "in_file") - wf.connect(func_mask, 'out_file', - output_node, 'func_brain_mask') + wf.connect(func_mask, "out_file", output_node, "func_brain_mask") return wf -def anat_based_mask(wf_name='bold_mask'): - """reference `DCAN lab BOLD mask `_ - """ +def anat_based_mask(wf_name="bold_mask"): + """Reference `DCAN lab BOLD mask `_""" wf = pe.Workflow(name=wf_name) - input_node = pe.Node(util.IdentityInterface(fields=['func', - 'anat_brain', - 'anat_head']), - name='inputspec') + input_node = pe.Node( + util.IdentityInterface(fields=["func", "anat_brain", "anat_head"]), + name="inputspec", + ) - output_node = pe.Node(util.IdentityInterface(fields=['func_brain_mask']), - name='outputspec') + output_node = pe.Node( + util.IdentityInterface(fields=["func_brain_mask"]), name="outputspec" + ) # 0. Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") - wf.connect(input_node, 'func', - func_single_volume, 'in_file_a') + wf.connect(input_node, "func", func_single_volume, "in_file_a") # 1. 
Register func head to anat head to get func2anat matrix
- linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(),
- name='func_to_anat_linear_reg')
+ linear_reg_func_to_anat = pe.Node(
+ interface=fsl.FLIRT(), name="func_to_anat_linear_reg"
+ )
 linear_reg_func_to_anat.inputs.dof = 6
- linear_reg_func_to_anat.inputs.interp = 'spline'
+ linear_reg_func_to_anat.inputs.interp = "spline"
 linear_reg_func_to_anat.inputs.searchr_x = [30, 30]
 linear_reg_func_to_anat.inputs.searchr_y = [30, 30]
 linear_reg_func_to_anat.inputs.searchr_z = [30, 30]

- wf.connect(func_single_volume, 'out_file',
- linear_reg_func_to_anat, 'in_file')
+ wf.connect(func_single_volume, "out_file", linear_reg_func_to_anat, "in_file")

- wf.connect(input_node, 'anat_head',
- linear_reg_func_to_anat, 'reference')
+ wf.connect(input_node, "anat_head", linear_reg_func_to_anat, "reference")

 # 2. Inverse func to anat affine, to get anat-to-func transform
- inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(),
- name='inv_func2anat_affine')
+ inv_func_to_anat_affine = pe.Node(
+ interface=fsl.ConvertXFM(), name="inv_func2anat_affine"
+ )
 inv_func_to_anat_affine.inputs.invert_xfm = True

- wf.connect(linear_reg_func_to_anat, 'out_matrix_file',
- inv_func_to_anat_affine, 'in_file')
+ wf.connect(
+ linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file"
+ )

 # 3. get BOLD mask
 # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space
- reg_anat_brain_to_func = pe.Node(interface=fsl.ApplyWarp(),
- name='reg_anat_brain_to_func')
- reg_anat_brain_to_func.inputs.interp = 'nn'
+ reg_anat_brain_to_func = pe.Node(
+ interface=fsl.ApplyWarp(), name="reg_anat_brain_to_func"
+ )
+ reg_anat_brain_to_func.inputs.interp = "nn"
 reg_anat_brain_to_func.inputs.relwarp = True

- wf.connect(input_node, 'anat_brain',
- reg_anat_brain_to_func, 'in_file')
+ wf.connect(input_node, "anat_brain", reg_anat_brain_to_func, "in_file")

- wf.connect(input_node, 'func',
- reg_anat_brain_to_func, 'ref_file')
+ wf.connect(input_node, "func", reg_anat_brain_to_func, "ref_file")

- wf.connect(inv_func_to_anat_affine, 'out_file',
- reg_anat_brain_to_func, 'premat')
+ wf.connect(inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "premat")

 # 3.2 Binarize transferred image and fill holes to get BOLD mask.
 # Binarize
- func_mask_bin = pe.Node(interface=fsl.ImageMaths(),
- name='func_mask')
- func_mask_bin.inputs.op_string = '-bin'
+ func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name="func_mask")
+ func_mask_bin.inputs.op_string = "-bin"

- wf.connect(reg_anat_brain_to_func, 'out_file',
- func_mask_bin, 'in_file')
+ wf.connect(reg_anat_brain_to_func, "out_file", func_mask_bin, "in_file")

- wf.connect(func_mask_bin, 'out_file',
- output_node, 'func_brain_mask')
+ wf.connect(func_mask_bin, "out_file", output_node, "func_brain_mask")

 return wf


-def create_scale_func_wf(scaling_factor, wf_name='scale_func'):
+def create_scale_func_wf(scaling_factor, wf_name="scale_func"):
 """Workflow to scale func data.
Workflow Inputs:: @@ -271,31 +266,27 @@ def create_scale_func_wf(scaling_factor, wf_name='scale_func'): wf_name : str name of the workflow """ - # allocate a workflow object preproc = pe.Workflow(name=wf_name) # configure the workflow's input spec - inputNode = pe.Node(util.IdentityInterface(fields=['func']), - name='inputspec') + inputNode = pe.Node(util.IdentityInterface(fields=["func"]), name="inputspec") # configure the workflow's output spec - outputNode = pe.Node(util.IdentityInterface(fields=['scaled_func']), - name='outputspec') + outputNode = pe.Node( + util.IdentityInterface(fields=["scaled_func"]), name="outputspec" + ) # allocate a node to edit the functional file - func_scale = pe.Node(interface=afni_utils.Refit(), - name='func_scale') + func_scale = pe.Node(interface=afni_utils.Refit(), name="func_scale") func_scale.inputs.xyzscale = scaling_factor # wire in the func_get_idx node - preproc.connect(inputNode, 'func', - func_scale, 'in_file') + preproc.connect(inputNode, "func", func_scale, "in_file") # wire the output - preproc.connect(func_scale, 'out_file', - outputNode, 'scaled_func') + preproc.connect(func_scale, "out_file", outputNode, "scaled_func") return preproc @@ -333,125 +324,120 @@ def create_wf_edit_func(wf_name="edit_func"): -prefix rest_3dc.nii.gz """ - # allocate a workflow object preproc = pe.Workflow(name=wf_name) # configure the workflow's input spec - inputNode = pe.Node(util.IdentityInterface(fields=['func', - 'start_idx', - 'stop_idx']), - name='inputspec') + inputNode = pe.Node( + util.IdentityInterface(fields=["func", "start_idx", "stop_idx"]), + name="inputspec", + ) # configure the workflow's output spec - outputNode = pe.Node(util.IdentityInterface(fields=['edited_func']), - name='outputspec') + outputNode = pe.Node( + util.IdentityInterface(fields=["edited_func"]), name="outputspec" + ) # allocate a node to check that the requested edits are # reasonable given the data - func_get_idx = pe.Node(util.Function(input_names=['in_files', - 'stop_idx', - 'start_idx'], - output_names=['stopidx', - 'startidx'], - function=get_idx), - name='func_get_idx') + func_get_idx = pe.Node( + util.Function( + input_names=["in_files", "stop_idx", "start_idx"], + output_names=["stopidx", "startidx"], + function=get_idx, + ), + name="func_get_idx", + ) # wire in the func_get_idx node - preproc.connect(inputNode, 'func', - func_get_idx, 'in_files') - preproc.connect(inputNode, 'start_idx', - func_get_idx, 'start_idx') - preproc.connect(inputNode, 'stop_idx', - func_get_idx, 'stop_idx') + preproc.connect(inputNode, "func", func_get_idx, "in_files") + preproc.connect(inputNode, "start_idx", func_get_idx, "start_idx") + preproc.connect(inputNode, "stop_idx", func_get_idx, "stop_idx") # allocate a node to edit the functional file - func_drop_trs = pe.Node(interface=afni_utils.Calc(), - name='func_drop_trs', - mem_gb=0.37, - mem_x=(739971956005215 / 151115727451828646838272, - 'in_file_a')) + func_drop_trs = pe.Node( + interface=afni_utils.Calc(), + name="func_drop_trs", + mem_gb=0.37, + mem_x=(739971956005215 / 151115727451828646838272, "in_file_a"), + ) - func_drop_trs.inputs.expr = 'a' - func_drop_trs.inputs.outputtype = 'NIFTI_GZ' + func_drop_trs.inputs.expr = "a" + func_drop_trs.inputs.outputtype = "NIFTI_GZ" # wire in the inputs - preproc.connect(inputNode, 'func', - func_drop_trs, 'in_file_a') + preproc.connect(inputNode, "func", func_drop_trs, "in_file_a") - preproc.connect(func_get_idx, 'startidx', - func_drop_trs, 'start_idx') + preproc.connect(func_get_idx, 
"startidx", func_drop_trs, "start_idx") - preproc.connect(func_get_idx, 'stopidx', - func_drop_trs, 'stop_idx') + preproc.connect(func_get_idx, "stopidx", func_drop_trs, "stop_idx") # wire the output - preproc.connect(func_drop_trs, 'out_file', - outputNode, 'edited_func') + preproc.connect(func_drop_trs, "out_file", outputNode, "edited_func") return preproc -def slice_timing_wf(name='slice_timing', tpattern=None, tzero=None): +def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): # allocate a workflow object wf = pe.Workflow(name=name) # configure the workflow's input spec - inputNode = pe.Node(util.IdentityInterface(fields=['func_ts', - 'tr', - 'tpattern']), - name='inputspec') + inputNode = pe.Node( + util.IdentityInterface(fields=["func_ts", "tr", "tpattern"]), name="inputspec" + ) # configure the workflow's output spec outputNode = pe.Node( - util.IdentityInterface(fields=['slice_time_corrected']), - name='outputspec') + util.IdentityInterface(fields=["slice_time_corrected"]), name="outputspec" + ) # create TShift AFNI node - func_slice_timing_correction = pe.Node(interface=preprocess.TShift(), - name='slice_timing', - mem_gb=0.45, - mem_x=(5247073869855161 / - 604462909807314587353088, - 'in_file')) - func_slice_timing_correction.inputs.outputtype = 'NIFTI_GZ' + func_slice_timing_correction = pe.Node( + interface=preprocess.TShift(), + name="slice_timing", + mem_gb=0.45, + mem_x=(5247073869855161 / 604462909807314587353088, "in_file"), + ) + func_slice_timing_correction.inputs.outputtype = "NIFTI_GZ" if tzero is not None: func_slice_timing_correction.inputs.tzero = tzero - wf.connect([ - ( - inputNode, - func_slice_timing_correction, - [ - ( - 'func_ts', - 'in_file' - ), - # ( - # # add the @ prefix to the tpattern file going into - # # AFNI 3dTshift - needed this so the tpattern file - # # output from get_scan_params would be tied downstream - # # via a connection (to avoid poofing) - # ('tpattern', nullify, add_afni_prefix), - # 'tpattern' - # ), - ( - ('tr', nullify), - 'tr' - ), - ] - ), - ]) + wf.connect( + [ + ( + inputNode, + func_slice_timing_correction, + [ + ("func_ts", "in_file"), + # ( + # # add the @ prefix to the tpattern file going into + # # AFNI 3dTshift - needed this so the tpattern file + # # output from get_scan_params would be tied downstream + # # via a connection (to avoid poofing) + # ('tpattern', nullify, add_afni_prefix), + # 'tpattern' + # ), + (("tr", nullify), "tr"), + ], + ), + ] + ) if tpattern is not None: func_slice_timing_correction.inputs.tpattern = tpattern else: - wf.connect(inputNode, ('tpattern', nullify, add_afni_prefix), - func_slice_timing_correction, 'tpattern') + wf.connect( + inputNode, + ("tpattern", nullify, add_afni_prefix), + func_slice_timing_correction, + "tpattern", + ) - wf.connect(func_slice_timing_correction, 'out_file', - outputNode, 'slice_time_corrected') + wf.connect( + func_slice_timing_correction, "out_file", outputNode, "slice_time_corrected" + ) return wf @@ -485,7 +471,6 @@ def get_idx(in_files, stop_idx=None, start_idx=None): Value of last slice to consider for the functional run """ - # Import packages from nibabel import load @@ -496,12 +481,11 @@ def get_idx(in_files, stop_idx=None, start_idx=None): # Check to make sure the input file is 4-dimensional if len(shape) != 4: - raise TypeError('Input nifti file: %s is not a 4D file' % in_files) + raise TypeError("Input nifti file: %s is not a 4D file" % in_files) # Grab the number of volumes nvols = int(hdr.get_data_shape()[3]) - if (start_idx == None) or 
(int(start_idx) < 0) or ( - int(start_idx) > (nvols - 1)): + if (start_idx == None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)): startidx = 0 else: startidx = int(start_idx) @@ -515,76 +499,75 @@ def get_idx(in_files, stop_idx=None, start_idx=None): @nodeblock( - name='func_reorient', - config=['functional_preproc', 'update_header'], - switch=['run'], - inputs=['bold'], - outputs=['desc-preproc_bold', 'desc-reorient_bold'] + name="func_reorient", + config=["functional_preproc", "update_header"], + switch=["run"], + inputs=["bold"], + outputs=["desc-preproc_bold", "desc-reorient_bold"], ) def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): - - func_deoblique = pe.Node(interface=afni_utils.Refit(), - name=f'func_deoblique_{pipe_num}', - mem_gb=0.68, - mem_x=(4664065662093477 / - 1208925819614629174706176, - 'in_file')) + func_deoblique = pe.Node( + interface=afni_utils.Refit(), + name=f"func_deoblique_{pipe_num}", + mem_gb=0.68, + mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"), + ) func_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('bold') - wf.connect(node, out, func_deoblique, 'in_file') + node, out = strat_pool.get_data("bold") + wf.connect(node, out, func_deoblique, "in_file") - func_reorient = pe.Node(interface=afni_utils.Resample(), - name=f'func_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) + func_reorient = pe.Node( + interface=afni_utils.Resample(), + name=f"func_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) - func_reorient.inputs.orientation = 'RPI' - func_reorient.inputs.outputtype = 'NIFTI_GZ' + func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_deoblique, 'out_file', func_reorient, 'in_file') + wf.connect(func_deoblique, "out_file", func_reorient, "in_file") outputs = { - 'desc-preproc_bold': (func_reorient, 'out_file'), - 'desc-reorient_bold': (func_reorient, 'out_file') + "desc-preproc_bold": (func_reorient, "out_file"), + "desc-reorient_bold": (func_reorient, "out_file"), } return (wf, outputs) @nodeblock( - name='func_scaling', - config=['functional_preproc', 'scaling'], - switch=['run'], - inputs=['desc-preproc_bold'], - outputs=['desc-preproc_bold'] + name="func_scaling", + config=["functional_preproc", "scaling"], + switch=["run"], + inputs=["desc-preproc_bold"], + outputs=["desc-preproc_bold"], ) def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None): - scale_func_wf = create_scale_func_wf( - scaling_factor=cfg.scaling_factor, - wf_name=f"scale_func_{pipe_num}" + scaling_factor=cfg.scaling_factor, wf_name=f"scale_func_{pipe_num}" ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, scale_func_wf, 'inputspec.func') + wf.connect(node, out, scale_func_wf, "inputspec.func") - outputs = { - 'desc-preproc_bold': (scale_func_wf, 'outputspec.scaled_func') - } + outputs = {"desc-preproc_bold": (scale_func_wf, "outputspec.scaled_func")} return (wf, outputs) @nodeblock( - name='func_truncate', - config=['functional_preproc', 'truncation'], - inputs=['desc-preproc_bold'], - outputs={'desc-preproc_bold': { - 'Description': 'Truncated functional time-series BOLD data.'}} + name="func_truncate", + config=["functional_preproc", "truncation"], + inputs=["desc-preproc_bold"], + outputs={ + "desc-preproc_bold": { + "Description": "Truncated functional time-series BOLD data." 
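+ # NOTE: re-registering "desc-preproc_bold" replaces the pooled BOLD
+ # series, so downstream nodeblocks pick up the truncated data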
+ } + }, ) def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None): - # if cfg.functional_preproc['truncation']['start_tr'] == 0 and \ # cfg.functional_preproc['truncation']['stop_tr'] == None: # data, key = strat_pool.get_data("desc-preproc_bold", @@ -592,333 +575,375 @@ def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None): # outputs = {key: data} # return (wf, outputs) - trunc_wf = create_wf_edit_func( - wf_name=f"edit_func_{pipe_num}" - ) - trunc_wf.inputs.inputspec.start_idx = cfg.functional_preproc[ - 'truncation']['start_tr'] - trunc_wf.inputs.inputspec.stop_idx = cfg.functional_preproc['truncation'][ - 'stop_tr'] + trunc_wf = create_wf_edit_func(wf_name=f"edit_func_{pipe_num}") + trunc_wf.inputs.inputspec.start_idx = cfg.functional_preproc["truncation"][ + "start_tr" + ] + trunc_wf.inputs.inputspec.stop_idx = cfg.functional_preproc["truncation"]["stop_tr"] node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, trunc_wf, 'inputspec.func') + wf.connect(node, out, trunc_wf, "inputspec.func") - outputs = { - 'desc-preproc_bold': (trunc_wf, 'outputspec.edited_func') - } + outputs = {"desc-preproc_bold": (trunc_wf, "outputspec.edited_func")} return (wf, outputs) @nodeblock( - name='func_despike', - config=['functional_preproc', 'despiking'], - switch=['run'], - option_key=['space'], - option_val=['native'], - inputs=['desc-preproc_bold'], - outputs={'desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.'}} + name="func_despike", + config=["functional_preproc", "despiking"], + switch=["run"], + option_key=["space"], + option_val=["native"], + inputs=["desc-preproc_bold"], + outputs={ + "desc-preproc_bold": { + "Description": "De-spiked BOLD time-series via AFNI 3dDespike." + } + }, ) def func_despike(wf, cfg, strat_pool, pipe_num, opt=None): - - despike = pe.Node(interface=preprocess.Despike(), - name=f'func_despiked_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike.inputs.outputtype = 'NIFTI_GZ' + despike = pe.Node( + interface=preprocess.Despike(), + name=f"func_despiked_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, despike, 'in_file') + wf.connect(node, out, despike, "in_file") - outputs = { - 'desc-preproc_bold': (despike, 'out_file') - } + outputs = {"desc-preproc_bold": (despike, "out_file")} return (wf, outputs) @nodeblock( - name='func_despike_template', - config=['functional_preproc', 'despiking'], - switch=['run'], - option_key=['space'], - option_val=['template'], - inputs=[('space-template_desc-preproc_bold', - 'space-template_res-derivative_desc-preproc_bold'), - 'T1w-template-funcreg', 'T1w-template-deriv'], - outputs={'space-template_desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.', - 'Template': 'T1w-template-funcreg'}, - 'space-template_res-derivative_desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.', - 'Template': 'T1w-template-deriv'}} + name="func_despike_template", + config=["functional_preproc", "despiking"], + switch=["run"], + option_key=["space"], + option_val=["template"], + inputs=[ + ( + "space-template_desc-preproc_bold", + "space-template_res-derivative_desc-preproc_bold", + ), + "T1w-template-funcreg", + "T1w-template-deriv", + ], + outputs={ + "space-template_desc-preproc_bold": { + 
"Description": "De-spiked BOLD time-series via AFNI 3dDespike.", + "Template": "T1w-template-funcreg", + }, + "space-template_res-derivative_desc-preproc_bold": { + "Description": "De-spiked BOLD time-series via AFNI 3dDespike.", + "Template": "T1w-template-deriv", + }, + }, ) def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None): - - despike = pe.Node(interface=preprocess.Despike(), - name=f'func_despiked_template_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike.inputs.outputtype = 'NIFTI_GZ' + despike = pe.Node( + interface=preprocess.Despike(), + name=f"func_despiked_template_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("space-template_desc-preproc_bold") - wf.connect(node, out, despike, 'in_file') + wf.connect(node, out, despike, "in_file") - outputs = { - 'space-template_desc-preproc_bold': (despike, 'out_file') - } - - if strat_pool.get_data("space-template_res-derivative_desc-preproc_bold"): - despike_funcderiv = pe.Node(interface=preprocess.Despike(), - name=f'func_deriv_despiked_template_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike_funcderiv.inputs.outputtype = 'NIFTI_GZ' + outputs = {"space-template_desc-preproc_bold": (despike, "out_file")} - node, out = strat_pool.get_data("space-template_res-derivative_desc-preproc_bold") - wf.connect(node, out, despike_funcderiv, 'in_file') - - outputs.update({ - 'space-template_res-derivative_desc-preproc_bold': - (despike_funcderiv, 'out_file')}) + if strat_pool.get_data("space-template_res-derivative_desc-preproc_bold"): + despike_funcderiv = pe.Node( + interface=preprocess.Despike(), + name=f"func_deriv_despiked_template_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike_funcderiv.inputs.outputtype = "NIFTI_GZ" + + node, out = strat_pool.get_data( + "space-template_res-derivative_desc-preproc_bold" + ) + wf.connect(node, out, despike_funcderiv, "in_file") + + outputs.update( + { + "space-template_res-derivative_desc-preproc_bold": ( + despike_funcderiv, + "out_file", + ) + } + ) return (wf, outputs) @nodeblock( - name='func_slice_time', - config=['functional_preproc', 'slice_timing_correction'], - switch=['run'], - inputs=['desc-preproc_bold', 'TR', 'tpattern'], - outputs={'desc-preproc_bold': { - 'Description': 'Slice-time corrected BOLD time-series via AFNI 3dTShift.'}, - 'desc-stc_bold': { - 'Description': 'Slice-time corrected BOLD time-series via AFNI 3dTShift.'}} + name="func_slice_time", + config=["functional_preproc", "slice_timing_correction"], + switch=["run"], + inputs=["desc-preproc_bold", "TR", "tpattern"], + outputs={ + "desc-preproc_bold": { + "Description": "Slice-time corrected BOLD time-series via AFNI 3dTShift." + }, + "desc-stc_bold": { + "Description": "Slice-time corrected BOLD time-series via AFNI 3dTShift." 
+ }, + }, ) def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None): - - slice_time = slice_timing_wf(name='func_slice_timing_correction_' - f'{pipe_num}', - tpattern=cfg.functional_preproc[ - 'slice_timing_correction']['tpattern'], - tzero=cfg.functional_preproc[ - 'slice_timing_correction']['tzero']) + slice_time = slice_timing_wf( + name="func_slice_timing_correction_" f"{pipe_num}", + tpattern=cfg.functional_preproc["slice_timing_correction"]["tpattern"], + tzero=cfg.functional_preproc["slice_timing_correction"]["tzero"], + ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, slice_time, 'inputspec.func_ts') + wf.connect(node, out, slice_time, "inputspec.func_ts") - node, out = strat_pool.get_data('TR') - wf.connect(node, out, slice_time, 'inputspec.tr') + node, out = strat_pool.get_data("TR") + wf.connect(node, out, slice_time, "inputspec.tr") - node, out = strat_pool.get_data('tpattern') - wf.connect(node, out, slice_time, 'inputspec.tpattern') + node, out = strat_pool.get_data("tpattern") + wf.connect(node, out, slice_time, "inputspec.tpattern") outputs = { - 'desc-preproc_bold': (slice_time, 'outputspec.slice_time_corrected'), - 'desc-stc_bold': (slice_time, 'outputspec.slice_time_corrected') + "desc-preproc_bold": (slice_time, "outputspec.slice_time_corrected"), + "desc-stc_bold": (slice_time, "outputspec.slice_time_corrected"), } return (wf, outputs) @nodeblock( - name='bold_mask_afni', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='AFNI', - inputs=['desc-preproc_bold'], - outputs={'space-bold_desc-brain_mask': - {'Description': 'Binary brain mask of the BOLD functional time-series created by AFNI 3dAutomask.'}} + name="bold_mask_afni", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="AFNI", + inputs=["desc-preproc_bold"], + outputs={ + "space-bold_desc-brain_mask": { + "Description": "Binary brain mask of the BOLD functional time-series created by AFNI 3dAutomask." 
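+ # roughly: 3dAutomask clips low-intensity voxels and keeps the largest
+ # connected cluster, so no anatomical input is needed for this strategy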
+ } + }, ) def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): - - func_get_brain_mask = pe.Node(interface=preprocess.Automask(), - name=f'func_get_brain_mask_AFNI_{pipe_num}') - func_get_brain_mask.inputs.outputtype = 'NIFTI_GZ' + func_get_brain_mask = pe.Node( + interface=preprocess.Automask(), name=f"func_get_brain_mask_AFNI_{pipe_num}" + ) + func_get_brain_mask.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_get_brain_mask, 'in_file') + wf.connect(node, out, func_get_brain_mask, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_get_brain_mask, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_get_brain_mask, "out_file")} return (wf, outputs) @nodeblock( - name='bold_mask_fsl', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='FSL', - inputs=['desc-preproc_bold'], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_fsl", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="FSL", + inputs=["desc-preproc_bold"], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - inputnode_bet = pe.Node( - util.IdentityInterface(fields=['frac', - 'mesh_boolean', - 'outline', - 'padding', - 'radius', - 'reduce_bias', - 'remove_eyes', - 'robust', - 'skull', - 'surfaces', - 'threshold', - 'vertical_gradient']), - name=f'BET_options_{pipe_num}') - - func_get_brain_mask = pe.Node(interface=fsl.BET(), - name=f'func_get_brain_mask_BET_{pipe_num}') - func_get_brain_mask.inputs.output_type = 'NIFTI_GZ' + util.IdentityInterface( + fields=[ + "frac", + "mesh_boolean", + "outline", + "padding", + "radius", + "reduce_bias", + "remove_eyes", + "robust", + "skull", + "surfaces", + "threshold", + "vertical_gradient", + ] + ), + name=f"BET_options_{pipe_num}", + ) + + func_get_brain_mask = pe.Node( + interface=fsl.BET(), name=f"func_get_brain_mask_BET_{pipe_num}" + ) + func_get_brain_mask.inputs.output_type = "NIFTI_GZ" func_get_brain_mask.inputs.mask = True inputnode_bet.inputs.set( - frac=cfg.functional_preproc['func_masking']['FSL-BET']['frac'], - mesh_boolean=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'mesh_boolean'], - outline=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'outline'], - padding=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'padding'], - radius=cfg.functional_preproc['func_masking']['FSL-BET']['radius'], - reduce_bias=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'reduce_bias'], - remove_eyes=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'remove_eyes'], - robust=cfg.functional_preproc['func_masking']['FSL-BET']['robust'], - skull=cfg.functional_preproc['func_masking']['FSL-BET']['skull'], - surfaces=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'surfaces'], - threshold=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'threshold'], - vertical_gradient= - cfg.functional_preproc['func_masking']['FSL-BET'][ - 'vertical_gradient'], - ) - - wf.connect([ - (inputnode_bet, func_get_brain_mask, [ - ('frac', 'frac'), - ('mesh_boolean', 'mesh'), - ('outline', 'outline'), - ('padding', 'padding'), - ('radius', 'radius'), - ('reduce_bias', 'reduce_bias'), - ('remove_eyes', 'remove_eyes'), - ('robust', 'robust'), - ('skull', 'skull'), - ('surfaces', 'surfaces'), - 
('threshold', 'threshold'), - ('vertical_gradient', 'vertical_gradient'), - ]) - ]) - - if cfg.functional_preproc['func_masking']['FSL-BET'][ - 'functional_mean_boolean']: - func_skull_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_mean_skull_{pipe_num}') - func_skull_mean.inputs.options = '-mean' - func_skull_mean.inputs.outputtype = 'NIFTI_GZ' + frac=cfg.functional_preproc["func_masking"]["FSL-BET"]["frac"], + mesh_boolean=cfg.functional_preproc["func_masking"]["FSL-BET"]["mesh_boolean"], + outline=cfg.functional_preproc["func_masking"]["FSL-BET"]["outline"], + padding=cfg.functional_preproc["func_masking"]["FSL-BET"]["padding"], + radius=cfg.functional_preproc["func_masking"]["FSL-BET"]["radius"], + reduce_bias=cfg.functional_preproc["func_masking"]["FSL-BET"]["reduce_bias"], + remove_eyes=cfg.functional_preproc["func_masking"]["FSL-BET"]["remove_eyes"], + robust=cfg.functional_preproc["func_masking"]["FSL-BET"]["robust"], + skull=cfg.functional_preproc["func_masking"]["FSL-BET"]["skull"], + surfaces=cfg.functional_preproc["func_masking"]["FSL-BET"]["surfaces"], + threshold=cfg.functional_preproc["func_masking"]["FSL-BET"]["threshold"], + vertical_gradient=cfg.functional_preproc["func_masking"]["FSL-BET"][ + "vertical_gradient" + ], + ) + + wf.connect( + [ + ( + inputnode_bet, + func_get_brain_mask, + [ + ("frac", "frac"), + ("mesh_boolean", "mesh"), + ("outline", "outline"), + ("padding", "padding"), + ("radius", "radius"), + ("reduce_bias", "reduce_bias"), + ("remove_eyes", "remove_eyes"), + ("robust", "robust"), + ("skull", "skull"), + ("surfaces", "surfaces"), + ("threshold", "threshold"), + ("vertical_gradient", "vertical_gradient"), + ], + ) + ] + ) + + if cfg.functional_preproc["func_masking"]["FSL-BET"]["functional_mean_boolean"]: + func_skull_mean = pe.Node( + interface=afni_utils.TStat(), name=f"func_mean_skull_{pipe_num}" + ) + func_skull_mean.inputs.options = "-mean" + func_skull_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_skull_mean, 'in_file') + wf.connect(node, out, func_skull_mean, "in_file") - out_node, out_file = (func_skull_mean, 'out_file') + out_node, out_file = (func_skull_mean, "out_file") - if cfg.functional_preproc['func_masking']['FSL-BET'][ - 'functional_mean_thr']['run']: + if cfg.functional_preproc["func_masking"]["FSL-BET"]["functional_mean_thr"][ + "run" + ]: # T=$(fslstats ${subject}_tmean.nii.gz -p 98) - threshold_T = pe.Node(interface=fsl.ImageStats(), - name=f'func_mean_skull_thr_value_{pipe_num}', - iterfield=['in_file']) - threshold_T.inputs.op_string = "-p %f " % (cfg.functional_preproc['func_masking']['FSL-BET']['functional_mean_thr']['threshold_value']) - - wf.connect(func_skull_mean, 'out_file', threshold_T, 'in_file') + threshold_T = pe.Node( + interface=fsl.ImageStats(), + name=f"func_mean_skull_thr_value_{pipe_num}", + iterfield=["in_file"], + ) + threshold_T.inputs.op_string = ( + "-p %f " + % ( + cfg.functional_preproc["func_masking"]["FSL-BET"][ + "functional_mean_thr" + ]["threshold_value"] + ) + ) + + wf.connect(func_skull_mean, "out_file", threshold_T, "in_file") # z=$(echo "$T / 10" | bc -l) def form_thr_string(thr): - threshold_z = str(float(thr/10)) - return '-thr %s' % (threshold_z) - - form_thr_string = pe.Node(util.Function(input_names=['thr'], - output_names=['out_str'], - function=form_thr_string), - name=f'form_thr_string_{pipe_num}') + threshold_z = str(float(thr / 10)) + return "-thr %s" % (threshold_z) + + form_thr_string = pe.Node( + 
util.Function(
+ input_names=["thr"],
+ output_names=["out_str"],
+ function=form_thr_string,
+ ),
+ name=f"form_thr_string_{pipe_num}",
+ )

- wf.connect(threshold_T, 'out_stat', form_thr_string, 'thr')
+ wf.connect(threshold_T, "out_stat", form_thr_string, "thr")

 # fslmaths ${subject}_tmean.nii.gz -thr ${z} ${subject}_tmean_thr.nii.gz
- func_skull_mean_thr = pe.Node(interface=fsl.ImageMaths(),
- name=f'func_mean_skull_thr_{pipe_num}')
-
- wf.connect(func_skull_mean, 'out_file', func_skull_mean_thr, 'in_file')
- wf.connect(form_thr_string, 'out_str', func_skull_mean_thr, 'op_string')
+ func_skull_mean_thr = pe.Node(
+ interface=fsl.ImageMaths(), name=f"func_mean_skull_thr_{pipe_num}"
+ )

- out_node, out_file = (func_skull_mean_thr, 'out_file')
+ wf.connect(func_skull_mean, "out_file", func_skull_mean_thr, "in_file")
+ wf.connect(form_thr_string, "out_str", func_skull_mean_thr, "op_string")

- if cfg.functional_preproc['func_masking']['FSL-BET'][
- 'functional_mean_bias_correction']:
+ out_node, out_file = (func_skull_mean_thr, "out_file")

+ if cfg.functional_preproc["func_masking"]["FSL-BET"][
+ "functional_mean_bias_correction"
+ ]:
 # fast --nopve -B ${subject}_tmean_thr.nii.gz
- func_mean_skull_fast = pe.Node(interface=fsl.FAST(),
- name=f'func_mean_skull_fast_{pipe_num}')
+ func_mean_skull_fast = pe.Node(
+ interface=fsl.FAST(), name=f"func_mean_skull_fast_{pipe_num}"
+ )
 func_mean_skull_fast.inputs.no_pve = True
 func_mean_skull_fast.inputs.output_biascorrected = True

- wf.connect(out_node, out_file, func_mean_skull_fast, 'in_files')
+ wf.connect(out_node, out_file, func_mean_skull_fast, "in_files")

- out_node, out_file = (func_mean_skull_fast, 'restored_image')
+ out_node, out_file = (func_mean_skull_fast, "restored_image")

- wf.connect(out_node, out_file, func_get_brain_mask, 'in_file')
+ wf.connect(out_node, out_file, func_get_brain_mask, "in_file")

 else:
 func_get_brain_mask.inputs.functional = True

 node, out = strat_pool.get_data("desc-preproc_bold")
- wf.connect(node, out, func_get_brain_mask, 'in_file')
+ wf.connect(node, out, func_get_brain_mask, "in_file")

 # erode one voxel of functional brain mask
- erode_one_voxel = pe.Node(interface=fsl.ErodeImage(),
- name=f'erode_one_voxel_{pipe_num}')
+ erode_one_voxel = pe.Node(
+ interface=fsl.ErodeImage(), name=f"erode_one_voxel_{pipe_num}"
+ )

- erode_one_voxel.inputs.kernel_shape = 'box'
+ erode_one_voxel.inputs.kernel_shape = "box"
 erode_one_voxel.inputs.kernel_size = 1.0

- wf.connect(func_get_brain_mask, 'mask_file',
- erode_one_voxel, 'in_file')
+ wf.connect(func_get_brain_mask, "mask_file", erode_one_voxel, "in_file")

- outputs = {
- 'space-bold_desc-brain_mask': (erode_one_voxel, 'out_file')
- }
+ outputs = {"space-bold_desc-brain_mask": (erode_one_voxel, "out_file")}

 return (wf, outputs)


 @nodeblock(
- name='bold_mask_fsl_afni',
- switch=[['functional_preproc', 'run'],
- ['functional_preproc', 'func_masking', 'run']],
- option_key=['functional_preproc', 'func_masking', 'using'],
- option_val='FSL_AFNI',
- inputs=[('motion-basefile', 'desc-preproc_bold'), 'FSL-AFNI-bold-ref', 'FSL-AFNI-brain-mask',
- 'FSL-AFNI-brain-probseg'],
- outputs=['space-bold_desc-brain_mask', 'desc-ref_bold']
+ name="bold_mask_fsl_afni",
+ switch=[
+ ["functional_preproc", "run"],
+ ["functional_preproc", "func_masking", "run"],
+ ],
+ option_key=["functional_preproc", "func_masking", "using"],
+ option_val="FSL_AFNI",
+ inputs=[
+ ("motion-basefile", "desc-preproc_bold"),
+ "FSL-AFNI-bold-ref",
+ "FSL-AFNI-brain-mask",
+ "FSL-AFNI-brain-probseg",
+ ],
+ 
outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], ) def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): """fMRIPrep-style BOLD mask `Ref `_ """ - # Initialize transforms with antsAI init_aff = pe.Node( AI( @@ -930,13 +955,13 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): verbose=True, ), name=f"init_aff_{pipe_num}", - n_procs=cfg.pipeline_setup['system_config']['num_OMP_threads'], + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data('FSL-AFNI-bold-ref') - wf.connect(node, out, init_aff, 'fixed_image') + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, init_aff, "fixed_image") - node, out = strat_pool.get_data('FSL-AFNI-brain-mask') - wf.connect(node, out, init_aff, 'fixed_image_mask') + node, out = strat_pool.get_data("FSL-AFNI-brain-mask") + wf.connect(node, out, init_aff, "fixed_image_mask") init_aff.inputs.search_grid = (40, (0, 40, 40)) @@ -946,27 +971,27 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): winsorize_upper_quantile=0.98, winsorize_lower_quantile=0.05, float=True, - metric=['Mattes'], + metric=["Mattes"], metric_weight=[1], radius_or_number_of_bins=[64], - transforms=['Affine'], + transforms=["Affine"], transform_parameters=[[0.1]], number_of_iterations=[[200]], convergence_window_size=[10], - convergence_threshold=[1.e-9], - sampling_strategy=['Random', 'Random'], + convergence_threshold=[1.0e-9], + sampling_strategy=["Random", "Random"], smoothing_sigmas=[[2]], - sigma_units=['mm', 'mm', 'mm'], + sigma_units=["mm", "mm", "mm"], shrink_factors=[[2]], sampling_percentage=[0.2], - use_histogram_matching=[True] + use_histogram_matching=[True], ), name=f"norm_{pipe_num}", - n_procs=cfg.pipeline_setup['system_config']['num_OMP_threads'], + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data('FSL-AFNI-bold-ref') - wf.connect(node, out, norm, 'fixed_image') + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, norm, "fixed_image") map_brainmask = pe.Node( ants.ApplyTransforms( @@ -977,12 +1002,13 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): ) # Use the higher resolution and probseg for numerical stability in rounding - node, out = strat_pool.get_data('FSL-AFNI-brain-probseg') - wf.connect(node, out, map_brainmask, 'input_image') + node, out = strat_pool.get_data("FSL-AFNI-brain-probseg") + wf.connect(node, out, map_brainmask, "input_image") - binarize_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_mask_{pipe_num}') - binarize_mask.inputs.args = '-thr 0.85 -bin' + binarize_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}" + ) + binarize_mask.inputs.args = "-thr 0.85 -bin" # Dilate pre_mask pre_dilate = pe.Node( @@ -997,10 +1023,10 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # Fix precision errors # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors - print_header = pe.Node(PrintHeader(what_information=4), - name=f'print_header_{pipe_num}') - set_direction = pe.Node(SetDirectionByMatrix(), - name=f'set_direction_{pipe_num}') + print_header = pe.Node( + PrintHeader(what_information=4), name=f"print_header_{pipe_num}" + ) + set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}") # Run N4 normally, force num_threads=1 for stability (images are # small, no need for >1) @@ -1009,634 +1035,742 @@ def 
bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): dimension=3, copy_header=True, bspline_fitting_distance=200 ), shrink_factor=2, - rescale_intensities = True, + rescale_intensities=True, name=f"n4_correct_{pipe_num}", n_procs=1, ) skullstrip_first_pass = pe.Node( fsl.BET(frac=0.2, mask=True, functional=False), - name=f'skullstrip_first_pass_{pipe_num}') + name=f"skullstrip_first_pass_{pipe_num}", + ) bet_dilate = pe.Node( - fsl.DilateImage(operation='max', kernel_shape='sphere', - kernel_size=6.0, internal_datatype='char'), - name=f'skullstrip_first_dilate_{pipe_num}') + fsl.DilateImage( + operation="max", + kernel_shape="sphere", + kernel_size=6.0, + internal_datatype="char", + ), + name=f"skullstrip_first_dilate_{pipe_num}", + ) - bet_mask = pe.Node(fsl.ApplyMask(), name=f'skullstrip_first_mask_' - f'{pipe_num}') + bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_" f"{pipe_num}") - unifize = pe.Node(afni_utils.Unifize(t2=True, outputtype='NIFTI_GZ', - args='-clfrac 0.2 -rbt 18.3 65.0 90.0', - out_file="uni.nii.gz"), - name=f'unifize_{pipe_num}') + unifize = pe.Node( + afni_utils.Unifize( + t2=True, + outputtype="NIFTI_GZ", + args="-clfrac 0.2 -rbt 18.3 65.0 90.0", + out_file="uni.nii.gz", + ), + name=f"unifize_{pipe_num}", + ) skullstrip_second_pass = pe.Node( - preprocess.Automask(dilate=1, outputtype='NIFTI_GZ'), - name=f'skullstrip_second_pass_{pipe_num}') + preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"), + name=f"skullstrip_second_pass_{pipe_num}", + ) - combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'), - name=f'combine_masks_{pipe_num}') + combine_masks = pe.Node( + fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}" + ) - apply_mask = pe.Node(fsl.ApplyMask(), - name=f'extract_ref_brain_bold_{pipe_num}') + apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}") node, out = strat_pool.get_data(["motion-basefile"]) - wf.connect([(node, init_aff, [(out, "moving_image")]), - (node, map_brainmask, [(out, "reference_image")]), - (node, norm, [(out, "moving_image")]), - (init_aff, norm, [ - ("output_transform", "initial_moving_transform")]), - (norm, map_brainmask, [ + wf.connect( + [ + (node, init_aff, [(out, "moving_image")]), + (node, map_brainmask, [(out, "reference_image")]), + (node, norm, [(out, "moving_image")]), + (init_aff, norm, [("output_transform", "initial_moving_transform")]), + ( + norm, + map_brainmask, + [ ("reverse_invert_flags", "invert_transform_flags"), ("reverse_transforms", "transforms"), - ]), - (map_brainmask, binarize_mask, [("output_image", "in_file")]), - (binarize_mask, pre_dilate, [("out_file", "in_file")]), - (pre_dilate, print_header, [("out_file", "image")]), - (print_header, set_direction, [("header", "direction")]), - (node, set_direction, [(out, "infile"), (out, "outfile")]), - (set_direction, n4_correct, [("outfile", "mask_image")]), - (node, n4_correct, [(out, "input_image")]), - (n4_correct, skullstrip_first_pass, - [('output_image', 'in_file')]), - (skullstrip_first_pass, bet_dilate, - [('mask_file', 'in_file')]), - (bet_dilate, bet_mask, [('out_file', 'mask_file')]), - (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]), - (bet_mask, unifize, [('out_file', 'in_file')]), - (unifize, skullstrip_second_pass, [('out_file', 'in_file')]), - (skullstrip_first_pass, combine_masks, - [('mask_file', 'in_file')]), - (skullstrip_second_pass, combine_masks, - [('out_file', 'operand_file')]), - (unifize, apply_mask, [('out_file', 'in_file')]), - (combine_masks, apply_mask, 
[('out_file', 'mask_file')]), - ]) + ], + ), + (map_brainmask, binarize_mask, [("output_image", "in_file")]), + (binarize_mask, pre_dilate, [("out_file", "in_file")]), + (pre_dilate, print_header, [("out_file", "image")]), + (print_header, set_direction, [("header", "direction")]), + (node, set_direction, [(out, "infile"), (out, "outfile")]), + (set_direction, n4_correct, [("outfile", "mask_image")]), + (node, n4_correct, [(out, "input_image")]), + (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]), + (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]), + (bet_dilate, bet_mask, [("out_file", "mask_file")]), + (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]), + (bet_mask, unifize, [("out_file", "in_file")]), + (unifize, skullstrip_second_pass, [("out_file", "in_file")]), + (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]), + (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]), + (unifize, apply_mask, [("out_file", "in_file")]), + (combine_masks, apply_mask, [("out_file", "mask_file")]), + ] + ) outputs = { - 'space-bold_desc-brain_mask': (combine_masks, 'out_file'), - 'desc-ref_bold': (apply_mask, 'out_file') + "space-bold_desc-brain_mask": (combine_masks, "out_file"), + "desc-ref_bold": (apply_mask, "out_file"), } return (wf, outputs) @nodeblock( - name='bold_mask_anatomical_refined', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Refined', - inputs=[('bold', 'desc-preproc_bold'), - ('desc-brain_T1w', ['space-T1w_desc-brain_mask', 'space-T1w_desc-acpcbrain_mask'])], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_anatomical_refined", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Refined", + inputs=[ + ("bold", "desc-preproc_bold"), + ( + "desc-brain_T1w", + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"], + ), + ], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): - # binarize anat mask, in case it is not a binary mask. 
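
(Not part of this patch, a minimal sketch: every ImageMaths node in this file splices its op_string verbatim into an fslmaths call, so the "-bin" below turns each nonzero voxel into 1 before any mask arithmetic. Filenames here are hypothetical; Nipype must be installed, and FSL is needed only to actually run the command:

    from nipype.interfaces import fsl

    binarize = fsl.ImageMaths(op_string="-bin")
    binarize.inputs.in_file = "anat_mask.nii.gz"       # hypothetical input
    binarize.inputs.out_file = "anat_mask_bin.nii.gz"  # hypothetical output
    # Nipype assembles: fslmaths anat_mask.nii.gz -bin anat_mask_bin.nii.gz
    print(binarize.cmdline)

The same pattern reads onto the "-dilM", "-abs -bin", and "-Tmin -bin" op_strings appearing later in this diff.)
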
- anat_brain_mask_bin = pe.Node(interface=fsl.ImageMaths(), - name=f'anat_brain_mask_bin_{pipe_num}') - anat_brain_mask_bin.inputs.op_string = '-bin' + anat_brain_mask_bin = pe.Node( + interface=fsl.ImageMaths(), name=f"anat_brain_mask_bin_{pipe_num}" + ) + anat_brain_mask_bin.inputs.op_string = "-bin" - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_brain_mask_bin, 'in_file') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_brain_mask_bin, "in_file") # fill holes of anat mask - anat_mask_filled = pe.Node(interface=afni.MaskTool(), - name=f'anat_brain_mask_filled_{pipe_num}') + anat_mask_filled = pe.Node( + interface=afni.MaskTool(), name=f"anat_brain_mask_filled_{pipe_num}" + ) anat_mask_filled.inputs.fill_holes = True - anat_mask_filled.inputs.outputtype = 'NIFTI_GZ' + anat_mask_filled.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_brain_mask_bin, 'out_file', - anat_mask_filled, 'in_file') + wf.connect(anat_brain_mask_bin, "out_file", anat_mask_filled, "in_file") # init_bold_mask : input raw func - init_bold_mask = anat_refined_mask(init_bold_mask=True, - wf_name=f'init_bold_mask_{pipe_num}') + init_bold_mask = anat_refined_mask( + init_bold_mask=True, wf_name=f"init_bold_mask_{pipe_num}" + ) - func_deoblique = pe.Node(interface=afni_utils.Refit(), - name=f'raw_func_deoblique_{pipe_num}') + func_deoblique = pe.Node( + interface=afni_utils.Refit(), name=f"raw_func_deoblique_{pipe_num}" + ) func_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('bold') - wf.connect(node, out, func_deoblique, 'in_file') + node, out = strat_pool.get_data("bold") + wf.connect(node, out, func_deoblique, "in_file") - func_reorient = pe.Node(interface=afni_utils.Resample(), - name=f'raw_func_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) + func_reorient = pe.Node( + interface=afni_utils.Resample(), + name=f"raw_func_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) - func_reorient.inputs.orientation = 'RPI' - func_reorient.inputs.outputtype = 'NIFTI_GZ' + func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_deoblique, 'out_file', - func_reorient, 'in_file') + wf.connect(func_deoblique, "out_file", func_reorient, "in_file") - wf.connect(func_reorient, 'out_file', - init_bold_mask, 'inputspec.func') + wf.connect(func_reorient, "out_file", init_bold_mask, "inputspec.func") - wf.connect(anat_mask_filled, 'out_file', - init_bold_mask, 'inputspec.anatomical_brain_mask') + wf.connect( + anat_mask_filled, "out_file", init_bold_mask, "inputspec.anatomical_brain_mask" + ) - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, init_bold_mask, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, init_bold_mask, "inputspec.anat_brain") # dilate init func brain mask - func_tmp_brain_mask = pe.Node(interface=fsl.ImageMaths(), - name=f'func_tmp_brain_mask_dil_{pipe_num}') - func_tmp_brain_mask.inputs.op_string = '-dilM' + func_tmp_brain_mask = pe.Node( + interface=fsl.ImageMaths(), name=f"func_tmp_brain_mask_dil_{pipe_num}" + ) + func_tmp_brain_mask.inputs.op_string = "-dilM" - wf.connect(init_bold_mask, 'outputspec.func_brain_mask', - func_tmp_brain_mask, 'in_file') + wf.connect( + init_bold_mask, "outputspec.func_brain_mask", func_tmp_brain_mask, "in_file" + ) # refined_bold_mask : 
input motion corrected func - refined_bold_mask = anat_refined_mask(init_bold_mask=False, - wf_name='refined_bold_mask' - f'_{pipe_num}') + refined_bold_mask = anat_refined_mask( + init_bold_mask=False, wf_name="refined_bold_mask" f"_{pipe_num}" + ) - node, out = strat_pool.get_data(["desc-preproc_bold", - "bold"]) - wf.connect(node, out, refined_bold_mask, 'inputspec.func') + node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) + wf.connect(node, out, refined_bold_mask, "inputspec.func") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, refined_bold_mask, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, refined_bold_mask, "inputspec.anat_brain") - wf.connect(func_tmp_brain_mask, 'out_file', - refined_bold_mask, 'inputspec.init_func_brain_mask') + wf.connect( + func_tmp_brain_mask, + "out_file", + refined_bold_mask, + "inputspec.init_func_brain_mask", + ) # dilate anatomical mask - if cfg.functional_preproc['func_masking']['Anatomical_Refined'][ - 'anatomical_mask_dilation']: - anat_mask_dilate = pe.Node(interface=afni.MaskTool(), - name=f'anat_mask_dilate_{pipe_num}') - anat_mask_dilate.inputs.dilate_inputs = '1' - anat_mask_dilate.inputs.outputtype = 'NIFTI_GZ' - - wf.connect(anat_mask_filled, 'out_file', - anat_mask_dilate, 'in_file') - wf.connect(anat_mask_dilate, 'out_file', - refined_bold_mask, 'inputspec.anatomical_brain_mask') + if cfg.functional_preproc["func_masking"]["Anatomical_Refined"][ + "anatomical_mask_dilation" + ]: + anat_mask_dilate = pe.Node( + interface=afni.MaskTool(), name=f"anat_mask_dilate_{pipe_num}" + ) + anat_mask_dilate.inputs.dilate_inputs = "1" + anat_mask_dilate.inputs.outputtype = "NIFTI_GZ" + + wf.connect(anat_mask_filled, "out_file", anat_mask_dilate, "in_file") + wf.connect( + anat_mask_dilate, + "out_file", + refined_bold_mask, + "inputspec.anatomical_brain_mask", + ) else: - wf.connect(anat_mask_filled, 'out_file', - refined_bold_mask, 'inputspec.anatomical_brain_mask') + wf.connect( + anat_mask_filled, + "out_file", + refined_bold_mask, + "inputspec.anatomical_brain_mask", + ) # get final func mask - func_mask_final = pe.Node(interface=fsl.MultiImageMaths(), - name=f'func_mask_final_{pipe_num}') + func_mask_final = pe.Node( + interface=fsl.MultiImageMaths(), name=f"func_mask_final_{pipe_num}" + ) func_mask_final.inputs.op_string = "-mul %s" - wf.connect(func_tmp_brain_mask, 'out_file', - func_mask_final, 'in_file') + wf.connect(func_tmp_brain_mask, "out_file", func_mask_final, "in_file") - wf.connect(refined_bold_mask, 'outputspec.func_brain_mask', - func_mask_final, 'operand_files') + wf.connect( + refined_bold_mask, + "outputspec.func_brain_mask", + func_mask_final, + "operand_files", + ) - outputs = { - 'space-bold_desc-brain_mask': (func_mask_final, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_final, "out_file")} return (wf, outputs) @nodeblock( - name='bold_mask_anatomical_based', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Based', - inputs=['desc-preproc_bold', ('desc-brain_T1w', ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_anatomical_based", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Based", + 
inputs=[ + "desc-preproc_bold", + ("desc-brain_T1w", ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]), + ], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): - '''Generate the BOLD mask by basing it off of the anatomical brain mask. + """Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. - ''' - + """ # 0. Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_single_volume, 'in_file_a') + wf.connect(node, out, func_single_volume, "in_file_a") # 1. Register func head to anat head to get func2anat matrix - linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='func_to_anat_linear_reg') + linear_reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name="func_to_anat_linear_reg" + ) linear_reg_func_to_anat.inputs.dof = 6 - linear_reg_func_to_anat.inputs.interp = 'spline' + linear_reg_func_to_anat.inputs.interp = "spline" linear_reg_func_to_anat.inputs.searchr_x = [30, 30] linear_reg_func_to_anat.inputs.searchr_y = [30, 30] linear_reg_func_to_anat.inputs.searchr_z = [30, 30] - wf.connect(func_single_volume, 'out_file', - linear_reg_func_to_anat, 'in_file') + wf.connect(func_single_volume, "out_file", linear_reg_func_to_anat, "in_file") - node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-reorient_T1w", - "T1w"]) - wf.connect(node, out, linear_reg_func_to_anat, 'reference') + node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]) + wf.connect(node, out, linear_reg_func_to_anat, "reference") # 2. Inverse func to anat affine, to get anat-to-func transform - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name='inv_func2anat_affine') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name="inv_func2anat_affine" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect( + linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file" + ) # 3. 
get BOLD mask # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space - reg_anat_brain_to_func = pe.Node(interface=fsl.ApplyWarp(), - name='reg_anat_brain_to_func') - reg_anat_brain_to_func.inputs.interp = 'nn' + reg_anat_brain_to_func = pe.Node( + interface=fsl.ApplyWarp(), name="reg_anat_brain_to_func" + ) + reg_anat_brain_to_func.inputs.interp = "nn" reg_anat_brain_to_func.inputs.relwarp = True node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_anat_brain_to_func, 'in_file') + wf.connect(node, out, reg_anat_brain_to_func, "in_file") node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, reg_anat_brain_to_func, 'ref_file') + wf.connect(node, out, reg_anat_brain_to_func, "ref_file") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_brain_to_func, 'premat') + wf.connect(inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "premat") # 3.2 Binarize transferred image - func_mask_bin = pe.Node(interface=fsl.ImageMaths(), - name='func_mask_bin') - func_mask_bin.inputs.op_string = '-abs -bin' + func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name="func_mask_bin") + func_mask_bin.inputs.op_string = "-abs -bin" - wf.connect(reg_anat_brain_to_func, 'out_file', - func_mask_bin, 'in_file') + wf.connect(reg_anat_brain_to_func, "out_file", func_mask_bin, "in_file") # 3.3 Fill holes to get BOLD mask - func_mask_fill_holes = pe.Node(interface=afni.MaskTool(), - name='func_mask_fill_holes') + func_mask_fill_holes = pe.Node( + interface=afni.MaskTool(), name="func_mask_fill_holes" + ) func_mask_fill_holes.inputs.fill_holes = True - func_mask_fill_holes.inputs.outputtype = 'NIFTI_GZ' + func_mask_fill_holes.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_mask_bin, 'out_file', - func_mask_fill_holes, 'in_file') + wf.connect(func_mask_bin, "out_file", func_mask_fill_holes, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_mask_fill_holes, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_fill_holes, "out_file")} return (wf, outputs) + def anat_brain_to_bold_res(wf_name, cfg, pipe_num): wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") - - inputNode = pe.Node(util.IdentityInterface(fields=['T1w-template-funcreg', - 'space-template_desc-preproc_T1w']), - name='inputspec') - outputNode = pe.Node(util.IdentityInterface(fields=['space-template_res-bold_desc-brain_T1w']), - name='outputspec') + + inputNode = pe.Node( + util.IdentityInterface( + fields=["T1w-template-funcreg", "space-template_desc-preproc_T1w"] + ), + name="inputspec", + ) + outputNode = pe.Node( + util.IdentityInterface(fields=["space-template_res-bold_desc-brain_T1w"]), + name="outputspec", + ) # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') + anat_brain_to_func_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"resample_anat_brain_in_standard_{pipe_num}" + ) - anat_brain_to_func_res.inputs.interp = 'spline' + anat_brain_to_func_res.inputs.interp = "spline" anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_to_func_res, 'in_file') - wf.connect(inputNode, 
'T1w-template-funcreg', anat_brain_to_func_res, 'ref_file') + wf.connect( + inputNode, "space-template_desc-preproc_T1w", anat_brain_to_func_res, "in_file" + ) + wf.connect(inputNode, "T1w-template-funcreg", anat_brain_to_func_res, "ref_file") - wf.connect(anat_brain_to_func_res, 'out_file', outputNode, 'space-template_res-bold_desc-brain_T1w') + wf.connect( + anat_brain_to_func_res, + "out_file", + outputNode, + "space-template_res-bold_desc-brain_T1w", + ) return wf + def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") - inputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-T1w_mask', - 'space-template_desc-preproc_T1w']), - name='inputspec') - outputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-bold_mask']), - name='outputspec') - - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + inputNode = pe.Node( + util.IdentityInterface( + fields=["space-template_desc-T1w_mask", "space-template_desc-preproc_T1w"] + ), + name="inputspec", + ) + outputNode = pe.Node( + util.IdentityInterface(fields=["space-template_desc-bold_mask"]), + name="outputspec", + ) - wf.connect(inputNode, 'space-template_desc-T1w_mask', anat_brain_mask_to_func_res, 'in_file') - wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_mask_to_func_res, 'ref_file') - wf.connect(anat_brain_mask_to_func_res, 'out_file', outputNode, 'space-template_desc-bold_mask') + anat_brain_mask_to_func_res = pe.Node( + interface=fsl.ApplyWarp(), + name=f"resample_anat_brain_mask_in_standard_{pipe_num}", + ) + + anat_brain_mask_to_func_res.inputs.interp = "nn" + anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] + + wf.connect( + inputNode, + "space-template_desc-T1w_mask", + anat_brain_mask_to_func_res, + "in_file", + ) + wf.connect( + inputNode, + "space-template_desc-preproc_T1w", + anat_brain_mask_to_func_res, + "ref_file", + ) + wf.connect( + anat_brain_mask_to_func_res, + "out_file", + outputNode, + "space-template_desc-bold_mask", + ) return wf + @nodeblock( - name='bold_mask_anatomical_resampled', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Resampled', - inputs=['desc-preproc_bold', 'T1w-template-funcreg', 'space-template_desc-preproc_T1w', - 'space-template_desc-brain_mask'], - outputs=['space-template_res-bold_desc-brain_T1w', 'space-template_desc-bold_mask', 'space-bold_desc-brain_mask'] + name="bold_mask_anatomical_resampled", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Resampled", + inputs=[ + "desc-preproc_bold", + "T1w-template-funcreg", + "space-template_desc-preproc_T1w", + 
"space-template_desc-brain_mask", + ], + outputs=[ + "space-template_res-bold_desc-brain_T1w", + "space-template_desc-bold_mask", + "space-bold_desc-brain_mask", + ], ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): - '''Resample anatomical brain mask in standard space to get BOLD brain mask in standard space + """Resample anatomical brain mask in standard space to get BOLD brain mask in standard space Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. - ''' - + """ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-preproc_T1w") + wf.connect( + node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" + ) - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + node, out = strat_pool.get_data("T1w-template-funcreg") + wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg") # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - - node, out = strat_pool.get_data('space-template_desc-brain_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res( + wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-brain_mask") + wf.connect( + node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" + ) + + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + anat_brain_mask_to_func_res, + "inputspec.space-template_desc-preproc_T1w", + ) # Resample func mask in template space back to native space func_mask_template_to_native = pe.Node( interface=afni.Resample(), - name=f'resample_func_mask_to_native_{pipe_num}', + name=f"resample_func_mask_to_native_{pipe_num}", mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - func_mask_template_to_native.inputs.resample_mode = 'NN' - func_mask_template_to_native.inputs.outputtype = 'NIFTI_GZ' - - wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', - func_mask_template_to_native, 'in_file') + mem_x=(0.0115, "in_file", "t"), + ) + func_mask_template_to_native.inputs.resample_mode = "NN" + func_mask_template_to_native.inputs.outputtype = "NIFTI_GZ" + + wf.connect( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + func_mask_template_to_native, + "in_file", + ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_mask_template_to_native, 'master') + wf.connect(node, out, func_mask_template_to_native, "master") outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 
'outputspec.space-template_res-bold_desc-brain_T1w'), - 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask'), - 'space-bold_desc-brain_mask': (func_mask_template_to_native, 'out_file') + "space-template_res-bold_desc-brain_T1w": ( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + ), + "space-template_desc-bold_mask": ( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + ), + "space-bold_desc-brain_mask": (func_mask_template_to_native, "out_file"), } return (wf, outputs) + @nodeblock( - name='bold_mask_ccs', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='CCS_Anatomical_Refined', - inputs=[['desc-motion_bold', 'desc-preproc_bold', 'bold'], 'desc-brain_T1w', - ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w']], - outputs=['space-bold_desc-brain_mask', 'desc-ref_bold'] + name="bold_mask_ccs", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="CCS_Anatomical_Refined", + inputs=[ + ["desc-motion_bold", "desc-preproc_bold", "bold"], + "desc-brain_T1w", + ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], + ], + outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], ) def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): - '''Generate the BOLD mask by basing it off of the anatomical brain. + """Generate the BOLD mask by basing it off of the anatomical brain. Adapted from `the BOLD mask method from the CCS pipeline `_. - ''' - + """ # Run 3dAutomask to generate func initial mask - func_tmp_brain_mask = pe.Node(interface=preprocess.Automask(), - name=f'func_tmp_brain_mask_AFNI_{pipe_num}') + func_tmp_brain_mask = pe.Node( + interface=preprocess.Automask(), name=f"func_tmp_brain_mask_AFNI_{pipe_num}" + ) func_tmp_brain_mask.inputs.dilate = 1 - func_tmp_brain_mask.inputs.outputtype = 'NIFTI_GZ' + func_tmp_brain_mask.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, func_tmp_brain_mask, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, func_tmp_brain_mask, "in_file") # Extract 8th volume as func ROI - func_roi = pe.Node(interface=fsl.ExtractROI(), - name=f'extract_func_roi_{pipe_num}') + func_roi = pe.Node(interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}") func_roi.inputs.t_min = 7 func_roi.inputs.t_size = 1 - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, func_roi, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, func_roi, "in_file") # Apply func initial mask on func ROI volume - func_tmp_brain = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_func_tmp_brain_{pipe_num}') + func_tmp_brain = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_func_tmp_brain_{pipe_num}" + ) - wf.connect(func_roi, 'roi_file', - func_tmp_brain, 'in_file') + wf.connect(func_roi, "roi_file", func_tmp_brain, "in_file") - wf.connect(func_tmp_brain_mask, 'out_file', - func_tmp_brain, 'mask_file') + wf.connect(func_tmp_brain_mask, "out_file", func_tmp_brain, "mask_file") # Register func tmp brain to anat brain to get func2anat matrix - reg_func_to_anat = 
pe.Node(interface=fsl.FLIRT(), - name=f'func_to_anat_linear_reg_{pipe_num}') - reg_func_to_anat.inputs.interp = 'trilinear' - reg_func_to_anat.inputs.cost = 'corratio' + reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name=f"func_to_anat_linear_reg_{pipe_num}" + ) + reg_func_to_anat.inputs.interp = "trilinear" + reg_func_to_anat.inputs.cost = "corratio" reg_func_to_anat.inputs.dof = 6 - wf.connect(func_tmp_brain, 'out_file', - reg_func_to_anat, 'in_file') + wf.connect(func_tmp_brain, "out_file", reg_func_to_anat, "in_file") node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_func_to_anat, 'reference') + wf.connect(node, out, reg_func_to_anat, "reference") # Inverse func2anat matrix - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name=f'inv_func2anat_affine_{pipe_num}') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name=f"inv_func2anat_affine_{pipe_num}" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect(reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file") # Transform anat brain to func space - reg_anat_brain_to_func = pe.Node(interface=fsl.FLIRT(), - name=f'reg_anat_brain_to_func_{pipe_num}') + reg_anat_brain_to_func = pe.Node( + interface=fsl.FLIRT(), name=f"reg_anat_brain_to_func_{pipe_num}" + ) reg_anat_brain_to_func.inputs.apply_xfm = True - reg_anat_brain_to_func.inputs.interp = 'trilinear' + reg_anat_brain_to_func.inputs.interp = "trilinear" node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_anat_brain_to_func, 'in_file') + wf.connect(node, out, reg_anat_brain_to_func, "in_file") - wf.connect(func_roi, 'roi_file', - reg_anat_brain_to_func, 'reference') + wf.connect(func_roi, "roi_file", reg_anat_brain_to_func, "reference") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_brain_to_func, 'in_matrix_file') + wf.connect( + inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "in_matrix_file" + ) # Binarize and dilate anat brain in func space - bin_anat_brain_in_func = pe.Node(interface=fsl.ImageMaths(), - name=f'bin_anat_brain_in_func_{pipe_num}') - bin_anat_brain_in_func.inputs.op_string = '-bin -dilM' + bin_anat_brain_in_func = pe.Node( + interface=fsl.ImageMaths(), name=f"bin_anat_brain_in_func_{pipe_num}" + ) + bin_anat_brain_in_func.inputs.op_string = "-bin -dilM" - wf.connect(reg_anat_brain_to_func, 'out_file', - bin_anat_brain_in_func, 'in_file') + wf.connect(reg_anat_brain_to_func, "out_file", bin_anat_brain_in_func, "in_file") # Binarize detectable func signals - bin_func = pe.Node(interface=fsl.ImageMaths(), - name=f'bin_func_{pipe_num}') - bin_func.inputs.op_string = '-Tstd -bin' + bin_func = pe.Node(interface=fsl.ImageMaths(), name=f"bin_func_{pipe_num}") + bin_func.inputs.op_string = "-Tstd -bin" - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, bin_func, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, bin_func, "in_file") # Take intersection of masks - merge_func_mask = pe.Node(util.Merge(2), - name=f'merge_func_mask_{pipe_num}') + merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") - wf.connect(func_tmp_brain_mask, 'out_file', - merge_func_mask, 'in1') + wf.connect(func_tmp_brain_mask, "out_file", merge_func_mask, "in1") - wf.connect(bin_anat_brain_in_func, 'out_file', - 
merge_func_mask, 'in2') + wf.connect(bin_anat_brain_in_func, "out_file", merge_func_mask, "in2") - intersect_mask = pe.Node(interface=fsl.MultiImageMaths(), - name=f'intersect_mask_{pipe_num}') - intersect_mask.inputs.op_string = '-mul %s -mul %s' - intersect_mask.inputs.output_datatype = 'char' + intersect_mask = pe.Node( + interface=fsl.MultiImageMaths(), name=f"intersect_mask_{pipe_num}" + ) + intersect_mask.inputs.op_string = "-mul %s -mul %s" + intersect_mask.inputs.output_datatype = "char" - wf.connect(bin_func, 'out_file', - intersect_mask, 'in_file') + wf.connect(bin_func, "out_file", intersect_mask, "in_file") - wf.connect(merge_func_mask, 'out', - intersect_mask, 'operand_files') + wf.connect(merge_func_mask, "out", intersect_mask, "operand_files") # this is the func input for coreg in ccs # TODO evaluate if it's necessary to use this brain - example_func_brain = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_example_func_brain_{pipe_num}') + example_func_brain = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_example_func_brain_{pipe_num}" + ) - wf.connect(func_roi, 'roi_file', - example_func_brain, 'in_file') + wf.connect(func_roi, "roi_file", example_func_brain, "in_file") - wf.connect(intersect_mask, 'out_file', - example_func_brain, 'mask_file') + wf.connect(intersect_mask, "out_file", example_func_brain, "mask_file") outputs = { - 'space-bold_desc-brain_mask': (intersect_mask, 'out_file'), - 'desc-ref_bold': (example_func_brain, 'out_file') + "space-bold_desc-brain_mask": (intersect_mask, "out_file"), + "desc-ref_bold": (example_func_brain, "out_file"), } return (wf, outputs) @nodeblock( - name='bold_masking', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run'], - ['functional_preproc', 'func_masking', 'apply_func_mask_in_native_space']], - inputs=[('desc-preproc_bold', 'space-bold_desc-brain_mask')], - outputs={'desc-preproc_bold': {'Description': 'The skull-stripped BOLD time-series.', 'SkullStripped': True}, - 'desc-brain_bold': {'Description': 'The skull-stripped BOLD time-series.', 'SkullStripped': True}} + name="bold_masking", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ["functional_preproc", "func_masking", "apply_func_mask_in_native_space"], + ], + inputs=[("desc-preproc_bold", "space-bold_desc-brain_mask")], + outputs={ + "desc-preproc_bold": { + "Description": "The skull-stripped BOLD time-series.", + "SkullStripped": True, + }, + "desc-brain_bold": { + "Description": "The skull-stripped BOLD time-series.", + "SkullStripped": True, + }, + }, ) def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): - func_edge_detect = pe.Node(interface=afni_utils.Calc(), - name=f'func_extract_brain_{pipe_num}') + func_edge_detect = pe.Node( + interface=afni_utils.Calc(), name=f"func_extract_brain_{pipe_num}" + ) - func_edge_detect.inputs.expr = 'a*b' - func_edge_detect.inputs.outputtype = 'NIFTI_GZ' + func_edge_detect.inputs.expr = "a*b" + func_edge_detect.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_edge_detect, 'in_file_a') + wf.connect(node, out, func_edge_detect, "in_file_a") node, out = strat_pool.get_data("space-bold_desc-brain_mask") - wf.connect(node, out, func_edge_detect, 'in_file_b') + wf.connect(node, out, func_edge_detect, "in_file_b") outputs = { - 'desc-preproc_bold': (func_edge_detect, 'out_file'), - 'desc-brain_bold': (func_edge_detect, 'out_file') + "desc-preproc_bold": (func_edge_detect, 
"out_file"), + "desc-brain_bold": (func_edge_detect, "out_file"), } return (wf, outputs) @nodeblock( - name='func_mean', - switch=[['functional_preproc', 'run'], ['functional_preproc', 'generate_func_mean', 'run']], - inputs=['desc-preproc_bold'], - outputs=['desc-mean_bold'] + name="func_mean", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "generate_func_mean", "run"], + ], + inputs=["desc-preproc_bold"], + outputs=["desc-mean_bold"], ) def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): + func_mean = pe.Node(interface=afni_utils.TStat(), name=f"func_mean_{pipe_num}") - func_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_mean_{pipe_num}') - - func_mean.inputs.options = '-mean' - func_mean.inputs.outputtype = 'NIFTI_GZ' + func_mean.inputs.options = "-mean" + func_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_mean, 'in_file') + wf.connect(node, out, func_mean, "in_file") - outputs = { - 'desc-mean_bold': (func_mean, 'out_file') - } + outputs = {"desc-mean_bold": (func_mean, "out_file")} return (wf, outputs) @nodeblock( - name='func_normalize', - switch=[['functional_preproc', 'run'], ['functional_preproc', 'normalize_func', 'run']], - inputs=['desc-preproc_bold'], - outputs=['desc-preproc_bold'] + name="func_normalize", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "normalize_func", "run"], + ], + inputs=["desc-preproc_bold"], + outputs=["desc-preproc_bold"], ) def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): - func_normalize = pe.Node(interface=fsl.ImageMaths(), - name=f'func_normalize_{pipe_num}', - mem_gb=0.7, - mem_x=(4538494663498653 / - 604462909807314587353088, 'in_file')) - func_normalize.inputs.op_string = '-ing 10000' - func_normalize.inputs.out_data_type = 'float' + func_normalize = pe.Node( + interface=fsl.ImageMaths(), + name=f"func_normalize_{pipe_num}", + mem_gb=0.7, + mem_x=(4538494663498653 / 604462909807314587353088, "in_file"), + ) + func_normalize.inputs.op_string = "-ing 10000" + func_normalize.inputs.out_data_type = "float" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_normalize, 'in_file') + wf.connect(node, out, func_normalize, "in_file") - outputs = { - 'desc-preproc_bold': (func_normalize, 'out_file') - } + outputs = {"desc-preproc_bold": (func_normalize, "out_file")} return (wf, outputs) @nodeblock( - name='func_mask_normalize', - config=['functional_preproc'], - switch=['run'], - inputs=[('desc-preproc_bold', 'space-bold_desc-brain_mask')], - outputs=['space-bold_desc-brain_mask'] + name="func_mask_normalize", + config=["functional_preproc"], + switch=["run"], + inputs=[("desc-preproc_bold", "space-bold_desc-brain_mask")], + outputs=["space-bold_desc-brain_mask"], ) def func_mask_normalize(wf, cfg, strat_pool, pipe_num, opt=None): - - func_mask_normalize = pe.Node(interface=fsl.ImageMaths(), - name=f'func_mask_normalize_{pipe_num}', - mem_gb=0.7, - mem_x=(4538494663498653 / - 604462909807314587353088, 'in_file')) - func_mask_normalize.inputs.op_string = '-Tmin -bin' - func_mask_normalize.inputs.out_data_type = 'char' + func_mask_normalize = pe.Node( + interface=fsl.ImageMaths(), + name=f"func_mask_normalize_{pipe_num}", + mem_gb=0.7, + mem_x=(4538494663498653 / 604462909807314587353088, "in_file"), + ) + func_mask_normalize.inputs.op_string = "-Tmin -bin" + func_mask_normalize.inputs.out_data_type = "char" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, 
func_mask_normalize, 'in_file') + wf.connect(node, out, func_mask_normalize, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_mask_normalize, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_normalize, "out_file")} return (wf, outputs) diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index fb12d49ab7..9b2c389a09 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -16,60 +16,38 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . import os -import copy -import time -import shutil -from CPAC.pipeline.nodeblock import nodeblock -from nipype import config from nipype import logging -from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.afni as afni -import nipype.interfaces.fsl as fsl +from nipype.interfaces import fsl import nipype.interfaces.io as nio -from nipype.interfaces.utility import Merge, IdentityInterface -import nipype.interfaces.utility as util - from indi_aws import aws_utils -from CPAC.utils.utils import concat_list -from CPAC.utils.interfaces.datasink import DataSink -from CPAC.utils.interfaces.function import Function - -import CPAC - -from CPAC.pipeline.cpac_pipeline import initialize_nipype_wf, \ - connect_pipeline, build_anat_preproc_stack, build_T1w_registration_stack,\ - build_segmentation_stack -from CPAC.pipeline.engine import initiate_rpool, ingress_output_dir - +from CPAC.longitudinal_pipeline.longitudinal_preproc import subject_specific_template +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.cpac_pipeline import ( + build_anat_preproc_stack, + build_segmentation_stack, + build_T1w_registration_stack, + connect_pipeline, + initialize_nipype_wf, +) +from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool +from CPAC.pipeline.nodeblock import nodeblock from CPAC.registration import ( create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, - create_wf_calculate_ants_warp + create_wf_calculate_ants_warp, ) - from CPAC.registration.registration import apply_transform - from CPAC.utils.datasource import ( resolve_resolution, - create_anat_datasource, - create_check_for_s3_node -) - -from CPAC.longitudinal_pipeline.longitudinal_preproc import ( - subject_specific_template ) - -from CPAC.utils import find_files, function -from CPAC.utils.outputs import Outputs +from CPAC.utils.interfaces.datasink import DataSink +from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy -from CPAC.utils.utils import ( - check_config_resources, - check_prov_for_regtool -) +from CPAC.utils.utils import check_config_resources, check_prov_for_regtool -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") @nodeblock( @@ -80,24 +58,28 @@ outputs=["space-T1w_desc-brain_mask"], ) def mask_T1w_longitudinal_template(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_anatomical_brain_mask_" f"{pipe_num}", + ) + brain_mask.inputs.args = "-bin" - brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'longitudinal_anatomical_brain_mask_' - f'{pipe_num}') - brain_mask.inputs.args = '-bin' - - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, brain_mask, 'in_file') + node, out = strat_pool.get_data("desc-brain_T1w") + 
wf.connect(node, out, brain_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (brain_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (brain_mask, "out_file")} return (wf, outputs) -def create_datasink(datasink_name, config, subject_id, session_id='', - strat_name='', map_node_iterfield=None): +def create_datasink( + datasink_name, + config, + subject_id, + session_id="", + strat_name="", + map_node_iterfield=None, +): """ Parameters @@ -114,8 +96,7 @@ def create_datasink(datasink_name, config, subject_id, session_id='', """ try: - encrypt_data = bool( - config.pipeline_setup['Amazon-AWS']['s3_encryption']) + encrypt_data = bool(config.pipeline_setup["Amazon-AWS"]["s3_encryption"]) except: encrypt_data = False @@ -123,58 +104,62 @@ def create_datasink(datasink_name, config, subject_id, session_id='', # Extract credentials path for output if it exists try: # Get path to creds file - creds_path = '' - if config.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials']: - creds_path = str(config.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials']) + creds_path = "" + if config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"]: + creds_path = str( + config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"] + ) creds_path = os.path.abspath(creds_path) - if config.pipeline_setup['output_directory'][ - 'path'].lower().startswith('s3://'): + if ( + config.pipeline_setup["output_directory"]["path"] + .lower() + .startswith("s3://") + ): # Test for s3 write access - s3_write_access = \ - aws_utils.test_bucket_access(creds_path, - config.pipeline_setup[ - 'output_directory']['path']) + s3_write_access = aws_utils.test_bucket_access( + creds_path, config.pipeline_setup["output_directory"]["path"] + ) if not s3_write_access: - raise Exception('Not able to write to bucket!') + raise Exception("Not able to write to bucket!") except Exception as e: - if config.pipeline_setup['output_directory'][ - 'path'].lower().startswith('s3://'): - err_msg = 'There was an error processing credentials or ' \ - 'accessing the S3 bucket. Check and try again.\n' \ - 'Error: %s' % e + if ( + config.pipeline_setup["output_directory"]["path"] + .lower() + .startswith("s3://") + ): + err_msg = ( + "There was an error processing credentials or " + "accessing the S3 bucket. 
Check and try again.\n" + "Error: %s" % e + ) raise Exception(err_msg) if map_node_iterfield is not None: ds = pe.MapNode( DataSink(infields=map_node_iterfield), - name='sinker_{}'.format(datasink_name), - iterfield=map_node_iterfield + name=f"sinker_{datasink_name}", + iterfield=map_node_iterfield, ) else: - ds = pe.Node( - DataSink(), - name='sinker_{}'.format(datasink_name) - ) + ds = pe.Node(DataSink(), name=f"sinker_{datasink_name}") - ds.inputs.base_directory = config.pipeline_setup['output_directory'][ - 'path'] + ds.inputs.base_directory = config.pipeline_setup["output_directory"]["path"] ds.inputs.creds_path = creds_path ds.inputs.encrypt_bucket_keys = encrypt_data ds.inputs.container = os.path.join( - 'pipeline_%s_%s' % ( - config.pipeline_setup['pipeline_name'], strat_name), - subject_id, session_id + "pipeline_%s_%s" % (config.pipeline_setup["pipeline_name"], strat_name), + subject_id, + session_id, ) return ds -def connect_anat_preproc_inputs(strat, anat_preproc, strat_name, - strat_nodes_list_list, workflow): +def connect_anat_preproc_inputs( + strat, anat_preproc, strat_name, strat_nodes_list_list, workflow +): """ Parameters ---------- @@ -196,26 +181,23 @@ def connect_anat_preproc_inputs(strat, anat_preproc, strat_name, strat_nodes_list_list : list a list of strat_nodes_list """ - new_strat = strat.fork() - tmp_node, out_key = new_strat['anatomical'] - workflow.connect(tmp_node, out_key, anat_preproc, 'inputspec.anat') + tmp_node, out_key = new_strat["anatomical"] + workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.anat") - tmp_node, out_key = new_strat['template_cmass'] - workflow.connect(tmp_node, out_key, anat_preproc, - 'inputspec.template_cmass') + tmp_node, out_key = new_strat["template_cmass"] + workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.template_cmass") new_strat.append_name(anat_preproc.name) - new_strat.update_resource_pool({ - 'anatomical_brain': ( - anat_preproc, 'outputspec.brain'), - 'anatomical_skull_leaf': ( - anat_preproc, 'outputspec.reorient'), - 'anatomical_brain_mask': ( - anat_preproc, 'outputspec.brain_mask'), - }) + new_strat.update_resource_pool( + { + "anatomical_brain": (anat_preproc, "outputspec.brain"), + "anatomical_skull_leaf": (anat_preproc, "outputspec.reorient"), + "anatomical_brain_mask": (anat_preproc, "outputspec.brain_mask"), + } + ) try: strat_nodes_list_list[strat_name].append(new_strat) @@ -239,10 +221,10 @@ def select_session(session, output_brains, warps): brain_path = None warp_path = None for brain_path in output_brains: - if f'{session}_' in brain_path: + if f"{session}_" in brain_path: break for warp_path in warps: - if f'{session}_' in warp_path: + if f"{session}_" in warp_path: break return (brain_path, warp_path) @@ -255,17 +237,16 @@ def select_session(session, output_brains, warps): outputs=["space-longitudinal_desc-brain_mask"], ) def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): - - brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'longitudinal_T1w_brain_mask_{pipe_num}') - brain_mask.inputs.args = '-bin' + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_T1w_brain_mask_{pipe_num}", + ) + brain_mask.inputs.args = "-bin" node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, brain_mask, 'in_file') + wf.connect(node, out, brain_mask, "in_file") - outputs = { - 'space-longitudinal_desc-brain_mask': (brain_mask, 'out_file') - } + outputs = {"space-longitudinal_desc-brain_mask": 
(brain_mask, "out_file")} return (wf, outputs) @@ -282,46 +263,43 @@ def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): ], outputs=["space-template_desc-brain_T1w"], ) -def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - +def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, opt=None): xfm_prov = strat_pool.get_cpac_provenance( - 'from-longitudinal_to-template_mode-image_xfm') + "from-longitudinal_to-template_mode-image_xfm" + ) reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_longitudinal_to_T1template_{pipe_num}', - reg_tool, time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_longitudinal_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs'][ - 'interpolation'] - elif reg_tool == 'fsl': + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = \ - strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-brain_T1w': - (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-brain_T1w": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -357,50 +335,57 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, ], ) def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - 'from-longitudinal_to-T1w_mode-image_desc-linear_xfm') + "from-longitudinal_to-T1w_mode-image_desc-linear_xfm" + ) reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] outputs = {} labels = [ - 'CSF_mask', 'CSF_desc-preproc_mask', 'CSF_probseg', - 'GM_mask', 'GM_desc-preproc_mask', 'GM_probseg', - 'WM_mask', 'WM_desc-preproc_mask', 'WM_probseg', + "CSF_mask", + "CSF_desc-preproc_mask", + "CSF_probseg", + "GM_mask", + "GM_desc-preproc_mask", + 
"GM_probseg", + "WM_mask", + "WM_desc-preproc_mask", + "WM_probseg", ] for label in labels: - apply_xfm = apply_transform(f'warp_longitudinal_seg_to_T1w_{label}_' - f'{pipe_num}', reg_tool, - time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) - - if reg_tool == 'ants': - apply_xfm.inputs.inputspec.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] - elif reg_tool == 'fsl': - apply_xfm.inputs.inputspec.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['FSL-FNIRT']['interpolation'] + apply_xfm = apply_transform( + f"warp_longitudinal_seg_to_T1w_{label}_" f"{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) + + if reg_tool == "ants": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] + elif reg_tool == "fsl": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = \ - strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs[f'label-{label}'] = (apply_xfm, 'outputspec.output_image') + outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image") return (wf, outputs) @@ -421,42 +406,44 @@ def anat_longitudinal_wf(subject_id, sub_list, config): ------- None """ - # list of lists for every strategy session_id_list = [] session_wfs = {} cpac_dirs = [] - out_dir = config.pipeline_setup['output_directory']['path'] + out_dir = config.pipeline_setup["output_directory"]["path"] - orig_pipe_name = config.pipeline_setup['pipeline_name'] + orig_pipe_name = config.pipeline_setup["pipeline_name"] # Loop over the sessions to create the input for the longitudinal # algorithm for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] session_id_list.append(unique_id) try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" ' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." 
% (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None except KeyError: input_creds_path = None - workflow = initialize_nipype_wf(config, sub_list[0], - # just grab the first one for the name - name="anat_longitudinal_pre-preproc") + workflow = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name="anat_longitudinal_pre-preproc", + ) workflow, rpool = initiate_rpool(workflow, config, session) pipeline_blocks = build_anat_preproc_stack(rpool, config) @@ -468,9 +455,10 @@ def anat_longitudinal_wf(subject_id, sub_list, config): workflow.run() - cpac_dir = os.path.join(out_dir, f'pipeline_{orig_pipe_name}', - f'{subject_id}_{unique_id}') - cpac_dirs.append(os.path.join(cpac_dir, 'anat')) + cpac_dir = os.path.join( + out_dir, f"pipeline_{orig_pipe_name}", f"{subject_id}_{unique_id}" + ) + cpac_dirs.append(os.path.join(cpac_dir, "anat")) # Now we have all the anat_preproc set up for every session # loop over the different anat preproc strategies @@ -479,88 +467,108 @@ def anat_longitudinal_wf(subject_id, sub_list, config): for cpac_dir in cpac_dirs: if os.path.isdir(cpac_dir): for filename in os.listdir(cpac_dir): - if 'T1w.nii' in filename: - for tag in filename.split('_'): - if 'desc-' in tag and 'brain' in tag: + if "T1w.nii" in filename: + for tag in filename.split("_"): + if "desc-" in tag and "brain" in tag: if tag not in strats_brain_dct: strats_brain_dct[tag] = [] - strats_brain_dct[tag].append(os.path.join(cpac_dir, - filename)) + strats_brain_dct[tag].append( + os.path.join(cpac_dir, filename) + ) if tag not in strats_head_dct: strats_head_dct[tag] = [] - head_file = filename.replace(tag, 'desc-reorient') - strats_head_dct[tag].append(os.path.join(cpac_dir, - head_file)) + head_file = filename.replace(tag, "desc-reorient") + strats_head_dct[tag].append( + os.path.join(cpac_dir, head_file) + ) for strat in strats_brain_dct.keys(): + wf = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name=f"template_node_{strat}", + ) - wf = initialize_nipype_wf(config, sub_list[0], - # just grab the first one for the name - name=f"template_node_{strat}") - - config.pipeline_setup[ - 'pipeline_name'] = f'longitudinal_{orig_pipe_name}' + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - template_node_name = f'longitudinal_anat_template_{strat}' + template_node_name = f"longitudinal_anat_template_{strat}" # This node will generate the longitudinal template (the functions are # in longitudinal_preproc) # Later other algorithms could be added to calculate it, like the # multivariate template from ANTS # It would just require to change it here. 
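
(A toy sketch of the iteration subject_specific_template performs, not part of this patch: sessions here are plain arrays and the alignment step is reduced to a mean shift, whereas the real node registers each session image to the current template with FLIRT using the configured dof, interp, and cost, then re-averages until the update falls below convergence_threshold:

    import numpy as np

    def toy_longitudinal_template(sessions, convergence_threshold=1e-3, max_iter=10):
        template = np.mean(sessions, axis=0)
        for _ in range(max_iter):
            # stand-in for registering each session to the current template
            aligned = [s + (template.mean() - s.mean()) for s in sessions]
            new_template = np.mean(aligned, axis=0)
            if np.max(np.abs(new_template - template)) < convergence_threshold:
                return new_template
            template = new_template
        return template

    print(toy_longitudinal_template([np.array([1.0, 2.0]), np.array([5.0, 6.0])]))

The real node additionally emits the per-session output brains and warps that the session loop further down distributes with select_session.)
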
- template_node = subject_specific_template( - workflow_name=template_node_name - ) + template_node = subject_specific_template(workflow_name=template_node_name) template_node.inputs.set( - avg_method=config.longitudinal_template_generation[ - 'average_method'], - dof=config.longitudinal_template_generation['dof'], - interp=config.longitudinal_template_generation['interp'], - cost=config.longitudinal_template_generation['cost'], + avg_method=config.longitudinal_template_generation["average_method"], + dof=config.longitudinal_template_generation["dof"], + interp=config.longitudinal_template_generation["interp"], + cost=config.longitudinal_template_generation["cost"], convergence_threshold=config.longitudinal_template_generation[ - 'convergence_threshold'], - thread_pool=config.longitudinal_template_generation[ - 'thread_pool'], - unique_id_list=list(session_wfs.keys()) + "convergence_threshold" + ], + thread_pool=config.longitudinal_template_generation["thread_pool"], + unique_id_list=list(session_wfs.keys()), ) template_node.inputs.input_brain_list = strats_brain_dct[strat] template_node.inputs.input_skull_list = strats_head_dct[strat] - long_id = f'longitudinal_{subject_id}_strat-{strat}' + long_id = f"longitudinal_{subject_id}_strat-{strat}" wf, rpool = initiate_rpool(wf, config, part_id=long_id) - rpool.set_data("space-longitudinal_desc-brain_T1w", - template_node, 'brain_template', {}, - "", template_node_name) + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) - rpool.set_data("space-longitudinal_desc-brain_T1w-template", - template_node, 'brain_template', {}, - "", template_node_name) + rpool.set_data( + "space-longitudinal_desc-brain_T1w-template", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) - rpool.set_data("space-longitudinal_desc-reorient_T1w", - template_node, 'skull_template', {}, - "", template_node_name) + rpool.set_data( + "space-longitudinal_desc-reorient_T1w", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) - rpool.set_data("space-longitudinal_desc-reorient_T1w-template", - template_node, 'skull_template', {}, - "", template_node_name) + rpool.set_data( + "space-longitudinal_desc-reorient_T1w-template", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) pipeline_blocks = [mask_longitudinal_T1w_brain] - pipeline_blocks = build_T1w_registration_stack(rpool, config, - pipeline_blocks) + pipeline_blocks = build_T1w_registration_stack(rpool, config, pipeline_blocks) - pipeline_blocks = build_segmentation_stack(rpool, config, - pipeline_blocks) + pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - excl = ['space-longitudinal_desc-brain_T1w', - 'space-longitudinal_desc-reorient_T1w', - 'space-longitudinal_desc-brain_mask'] + excl = [ + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + "space-longitudinal_desc-brain_mask", + ] rpool.gather_pipes(wf, config, add_excl=excl) # this is going to run multiple times! 
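
(Before the per-session write-out in the next hunk, it may help to see how the select_session helper added earlier in this file routes a session to its slice of the template node's list outputs. A self-contained restatement with hypothetical paths, not part of this patch:

    def select_session(session, output_brains, warps):
        # return the first brain/warp pair whose path contains "<session>_"
        brain_path = None
        warp_path = None
        for brain_path in output_brains:
            if f"{session}_" in brain_path:
                break
        for warp_path in warps:
            if f"{session}_" in warp_path:
                break
        return (brain_path, warp_path)

    brains = ["/out/sub-01_ses-1_brain.nii.gz", "/out/sub-01_ses-2_brain.nii.gz"]
    warps = ["/out/sub-01_ses-1_xfm.mat", "/out/sub-01_ses-2_xfm.mat"]
    print(select_session("ses-2", brains, warps))
    # ('/out/sub-01_ses-2_brain.nii.gz', '/out/sub-01_ses-2_xfm.mat')

One caveat a reviewer might flag: if nothing matches, the loops leave the variables at the last list element rather than None.)
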
@@ -568,21 +576,21 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf.run() # now, just write out a copy of the above to each session - config.pipeline_setup['pipeline_name'] = orig_pipe_name + config.pipeline_setup["pipeline_name"] = orig_pipe_name for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" ' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -593,56 +601,64 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf, rpool = initiate_rpool(wf, config, session) - config.pipeline_setup[ - 'pipeline_name'] = f'longitudinal_{orig_pipe_name}' - rpool = ingress_output_dir(config, rpool, long_id, - creds_path=input_creds_path) - - select_node_name = f'select_{unique_id}' - select_sess = pe.Node(Function(input_names=['session', - 'output_brains', - 'warps'], - output_names=['brain_path', - 'warp_path'], - function=select_session), - name=select_node_name) + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + rpool = ingress_output_dir( + config, rpool, long_id, creds_path=input_creds_path + ) + + select_node_name = f"select_{unique_id}" + select_sess = pe.Node( + Function( + input_names=["session", "output_brains", "warps"], + output_names=["brain_path", "warp_path"], + function=select_session, + ), + name=select_node_name, + ) select_sess.inputs.session = unique_id - wf.connect(template_node, 'output_brain_list', select_sess, - 'output_brains') - wf.connect(template_node, 'warp_list', select_sess, 'warps') + wf.connect(template_node, "output_brain_list", select_sess, "output_brains") + wf.connect(template_node, "warp_list", select_sess, "warps") - rpool.set_data("space-longitudinal_desc-brain_T1w", - select_sess, 'brain_path', {}, "", - select_node_name) + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + select_sess, + "brain_path", + {}, + "", + select_node_name, + ) - rpool.set_data("from-T1w_to-longitudinal_mode-image_" - "desc-linear_xfm", - select_sess, 'warp_path', {}, "", - select_node_name) + rpool.set_data( + "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + select_sess, + "warp_path", + {}, + "", + select_node_name, + ) - config.pipeline_setup['pipeline_name'] = orig_pipe_name - excl = ['space-template_desc-brain_T1w', - 'space-T1w_desc-brain_mask'] + config.pipeline_setup["pipeline_name"] = orig_pipe_name + excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] rpool.gather_pipes(wf, config, add_excl=excl) wf.run() # begin single-session stuff again for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" 
' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -653,8 +669,10 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf, rpool = initiate_rpool(wf, config, session) - pipeline_blocks = [warp_longitudinal_T1w_to_template, - warp_longitudinal_seg_to_T1w] + pipeline_blocks = [ + warp_longitudinal_T1w_to_template, + warp_longitudinal_seg_to_T1w, + ] wf = connect_pipeline(wf, config, rpool, pipeline_blocks) @@ -665,8 +683,6 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf.run() - - # TODO check: # 1 func alone works # 2 anat + func works, pass anat strategy list? @@ -687,42 +703,43 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): strat_list_ses_list : list of list a list of strategies; within each strategy, a list of sessions """ - - datasink = pe.Node(nio.DataSink(), name='sinker') - datasink.inputs.base_directory = \ - config.pipeline_setup['working_directory']['path'] + datasink = pe.Node(nio.DataSink(), name="sinker") + datasink.inputs.base_directory = config.pipeline_setup["working_directory"]["path"] session_id_list = [] ses_list_strat_list = {} - workflow_name = 'func_preproc_longitudinal_' + str(subject_id) + workflow_name = "func_preproc_longitudinal_" + str(subject_id) workflow = pe.Workflow(name=workflow_name) - workflow.base_dir = config.pipeline_setup['working_directory']['path'] - workflow.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': os.path.abspath( - config.pipeline_setup['crash_directory']['path']) + workflow.base_dir = config.pipeline_setup["working_directory"]["path"] + workflow.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": os.path.abspath( + config.pipeline_setup["crash_directory"]["path"] + ), } for sub_dict in sub_list: - if 'func' in sub_dict or 'rest' in sub_dict: - if 'func' in sub_dict: - func_paths_dict = sub_dict['func'] + if "func" in sub_dict or "rest" in sub_dict: + if "func" in sub_dict: + func_paths_dict = sub_dict["func"] else: - func_paths_dict = sub_dict['rest'] + func_paths_dict = sub_dict["rest"] - unique_id = sub_dict['unique_id'] + unique_id = sub_dict["unique_id"] session_id_list.append(unique_id) try: - creds_path = sub_dict['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = sub_dict["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" was not ' \ - 'found. Check this path and try again.' % ( - creds_path, subject_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" was not ' + "found. Check this path and try again." 
+ % (creds_path, subject_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -731,7 +748,7 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): strat = Strategy() strat_list = [strat] - node_suffix = '_'.join([subject_id, unique_id]) + node_suffix = "_".join([subject_id, unique_id]) # Functional Ingress Workflow # add optional flag @@ -742,24 +759,23 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): sub_dict, subject_id, input_creds_path, - node_suffix) + node_suffix, + ) # Functional Initial Prep Workflow - workflow, strat_list = connect_func_init(workflow, strat_list, - config, node_suffix) + workflow, strat_list = connect_func_init( + workflow, strat_list, config, node_suffix + ) # Functional Image Preprocessing Workflow - workflow, strat_list = connect_func_preproc(workflow, strat_list, - config, node_suffix) + workflow, strat_list = connect_func_preproc( + workflow, strat_list, config, node_suffix + ) # Distortion Correction - workflow, strat_list = connect_distortion_correction(workflow, - strat_list, - config, - diff, - blip, - fmap_rp_list, - node_suffix) + workflow, strat_list = connect_distortion_correction( + workflow, strat_list, config, diff, blip, fmap_rp_list, node_suffix + ) ses_list_strat_list[node_suffix] = strat_list @@ -771,10 +787,10 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): # TODO rename and reorganize dict # TODO update strat name strat_list_ses_list = {} - strat_list_ses_list['func_default'] = [] + strat_list_ses_list["func_default"] = [] for sub_ses_id, strat_nodes_list in ses_list_strat_list.items(): - strat_list_ses_list['func_default'].append(strat_nodes_list[0]) + strat_list_ses_list["func_default"].append(strat_nodes_list[0]) workflow.run() @@ -795,16 +811,15 @@ def merge_func_preproc(working_directory): skull_list : list a list of func preprocessed skull """ - brain_list = [] skull_list = [] for dirpath, dirnames, filenames in os.walk(working_directory): for f in filenames: - if 'func_get_preprocessed_median' in dirpath and '.nii.gz' in f: + if "func_get_preprocessed_median" in dirpath and ".nii.gz" in f: filepath = os.path.join(dirpath, f) brain_list.append(filepath) - if 'func_get_motion_correct_median' in dirpath and '.nii.gz' in f: + if "func_get_motion_correct_median" in dirpath and ".nii.gz" in f: filepath = os.path.join(dirpath, f) skull_list.append(filepath) @@ -815,70 +830,81 @@ def merge_func_preproc(working_directory): def register_func_longitudinal_template_to_standard( - longitudinal_template_node, c, workflow, strat_init, strat_name): - sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = \ + longitudinal_template_node, c, workflow, strat_init, strat_name +): + sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = ( check_config_resources(c) + ) strat_init_new = strat_init.fork() - strat_init_new.update_resource_pool({ - 'functional_preprocessed_median': ( - longitudinal_template_node, 'brain_template'), - 'motion_correct_median': ( - longitudinal_template_node, 'skull_template') - }) + strat_init_new.update_resource_pool( + { + "functional_preprocessed_median": ( + longitudinal_template_node, + "brain_template", + ), + "motion_correct_median": (longitudinal_template_node, "skull_template"), + } + ) strat_list = [strat_init_new] new_strat_list = [] - regOption = c.anatomical_preproc[ - 'registration_workflow' - ]['registration']['using'] - - if 'FSL' in regOption: + regOption = c.anatomical_preproc["registration_workflow"]["registration"]["using"] + if "FSL" 
in regOption: for num_strat, strat in enumerate(strat_list): - flirt_reg_func_mni = create_fsl_flirt_linear_reg( - 'func_mni_flirt_register_%s_%d' % (strat_name, num_strat) + "func_mni_flirt_register_%s_%d" % (strat_name, num_strat) ) - if c.functional_registration['2-func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] not in ["trilinear", - "sinc", "spline"]: + if c.functional_registration["2-func_registration_to_template"][ + "FNIRT_pipelines" + ]["interpolation"] not in ["trilinear", "sinc", "spline"]: err_msg = 'The selected FSL interpolation method may be in the list of values: "trilinear", "sinc", "spline"' raise Exception(err_msg) # Input registration parameters - flirt_reg_func_mni.inputs.inputspec.interp = \ - c.functional_registration['2-func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + flirt_reg_func_mni.inputs.inputspec.interp = c.functional_registration[ + "2-func_registration_to_template" + ]["FNIRT_pipelines"]["interpolation"] - node, out_file = strat['functional_preprocessed_median'] - workflow.connect(node, out_file, - flirt_reg_func_mni, 'inputspec.input_brain') + node, out_file = strat["functional_preprocessed_median"] + workflow.connect( + node, out_file, flirt_reg_func_mni, "inputspec.input_brain" + ) # pass the reference files - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, flirt_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, flirt_reg_func_mni, "inputspec.reference_brain" + ) - if 'ANTS' in regOption: + if "ANTS" in regOption: strat = strat.fork() new_strat_list.append(strat) strat.append_name(flirt_reg_func_mni.name) - strat.update_resource_pool({ - 'registration_method': 'FSL', - 'func_longitudinal_to_mni_linear_xfm': ( - flirt_reg_func_mni, 'outputspec.linear_xfm'), - 'mni_to_func_longitudinal_linear_xfm': ( - flirt_reg_func_mni, 'outputspec.invlinear_xfm'), - 'func_longitudinal_template_to_standard': ( - flirt_reg_func_mni, 'outputspec.output_brain') - }) + strat.update_resource_pool( + { + "registration_method": "FSL", + "func_longitudinal_to_mni_linear_xfm": ( + flirt_reg_func_mni, + "outputspec.linear_xfm", + ), + "mni_to_func_longitudinal_linear_xfm": ( + flirt_reg_func_mni, + "outputspec.invlinear_xfm", + ), + "func_longitudinal_template_to_standard": ( + flirt_reg_func_mni, + "outputspec.output_brain", + ), + } + ) strat_list += new_strat_list @@ -889,51 +915,52 @@ def register_func_longitudinal_template_to_standard( except AttributeError: fsl_linear_reg_only = [0] - if 'FSL' in regOption and 0 in fsl_linear_reg_only: - + if "FSL" in regOption and 0 in fsl_linear_reg_only: for num_strat, strat in enumerate(strat_list): - - if strat.get('registration_method') == 'FSL': - + if strat.get("registration_method") == "FSL": fnirt_reg_func_mni = create_fsl_fnirt_nonlinear_reg( - 'func_mni_fnirt_register_%s_%d' % (strat_name, num_strat) + "func_mni_fnirt_register_%s_%d" % (strat_name, num_strat) ) # brain input - node, out_file = strat['functional_preprocessed_median'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, 'inputspec.input_brain') + node, out_file = strat["functional_preprocessed_median"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.input_brain" + ) # brain reference - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = 
strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.reference_brain" + ) # skull input - node, out_file = strat['motion_correct_median'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, 'inputspec.input_skull') + node, out_file = strat["motion_correct_median"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.input_skull" + ) # skull reference - node, out_file = strat['template_skull_for_func_preproc'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, - 'inputspec.reference_skull') + node, out_file = strat["template_skull_for_func_preproc"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.reference_skull" + ) - node, out_file = strat['func_longitudinal_to_mni_linear_xfm'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, 'inputspec.linear_aff') + node, out_file = strat["func_longitudinal_to_mni_linear_xfm"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.linear_aff" + ) - node, out_file = strat['template_ref_mask'] - workflow.connect(node, out_file, - fnirt_reg_func_mni, 'inputspec.ref_mask') + node, out_file = strat["template_ref_mask"] + workflow.connect( + node, out_file, fnirt_reg_func_mni, "inputspec.ref_mask" + ) # assign the FSL FNIRT config file specified in pipeline # config.yml - fnirt_reg_func_mni.inputs.inputspec.fnirt_config = \ - c.anatomical_preproc['registration_workflow']['registration'][ - 'FSL-FNIRT']['fnirt_config'] + fnirt_reg_func_mni.inputs.inputspec.fnirt_config = c.anatomical_preproc[ + "registration_workflow" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] if 1 in fsl_linear_reg_only: strat = strat.fork() @@ -941,132 +968,143 @@ def register_func_longitudinal_template_to_standard( strat.append_name(fnirt_reg_func_mni.name) - strat.update_resource_pool({ - 'func_longitudinal_to_mni_nonlinear_xfm': ( - fnirt_reg_func_mni, 'outputspec.nonlinear_xfm'), - 'func_longitudinal_template_to_standard': ( - fnirt_reg_func_mni, 'outputspec.output_brain') - }, override=True) + strat.update_resource_pool( + { + "func_longitudinal_to_mni_nonlinear_xfm": ( + fnirt_reg_func_mni, + "outputspec.nonlinear_xfm", + ), + "func_longitudinal_template_to_standard": ( + fnirt_reg_func_mni, + "outputspec.output_brain", + ), + }, + override=True, + ) strat_list += new_strat_list new_strat_list = [] for num_strat, strat in enumerate(strat_list): - # or run ANTS anatomical-to-MNI registration instead - if 'ANTS' in regOption and \ - strat.get('registration_method') != 'FSL': - - ants_reg_func_mni = \ - create_wf_calculate_ants_warp( - 'func_mni_ants_register_%s_%d' % (strat_name, num_strat), - num_threads=num_ants_cores, - reg_ants_skull= - c.anatomical_preproc['registration_workflow'][ - 'reg_with_skull'] - ) + if "ANTS" in regOption and strat.get("registration_method") != "FSL": + ants_reg_func_mni = create_wf_calculate_ants_warp( + "func_mni_ants_register_%s_%d" % (strat_name, num_strat), + num_threads=num_ants_cores, + reg_ants_skull=c.anatomical_preproc["registration_workflow"][ + "reg_with_skull" + ], + ) - if c.functional_registration['2-func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] not in ['Linear', - 'BSpline', - 'LanczosWindowedSinc']: + if c.functional_registration["2-func_registration_to_template"][ + "ANTs_pipelines" + ]["interpolation"] not in ["Linear", "BSpline", "LanczosWindowedSinc"]: err_msg = 'The selected ANTS interpolation method may be in the list of values: "Linear", "BSpline", "LanczosWindowedSinc"' raise 
Exception(err_msg) # Input registration parameters - ants_reg_func_mni.inputs.inputspec.interp = \ - c.functional_registration['2-func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] + ants_reg_func_mni.inputs.inputspec.interp = c.functional_registration[ + "2-func_registration_to_template" + ]["ANTs_pipelines"]["interpolation"] # calculating the transform with the skullstripped is # reported to be better, but it requires very high # quality skullstripping. If skullstripping is imprecise # registration with skull is preferred - if c.anatomical_preproc['registration_workflow'][ - 'reg_with_skull']: - + if c.anatomical_preproc["registration_workflow"]["reg_with_skull"]: # get the skull-stripped anatomical from resource pool - node, out_file = strat['functional_preprocessed_median'] + node, out_file = strat["functional_preprocessed_median"] # pass the anatomical to the workflow - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_brain') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_brain" + ) # get the reorient skull-on anatomical from resource pool - node, out_file = strat['motion_correct_median'] + node, out_file = strat["motion_correct_median"] # pass the anatomical to the workflow - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_skull') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_skull" + ) # pass the reference file - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_brain" + ) # pass the reference file - node, out_file = strat['template_skull_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_skull') + node, out_file = strat["template_skull_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_skull" + ) else: + node, out_file = strat["functional_preprocessed_median"] - node, out_file = strat['functional_preprocessed_median'] - - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_brain') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_brain" + ) # pass the reference file - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_brain" + ) # pass the reference mask file - node, out_file = strat['template_brain_mask_for_func_preproc'] + node, out_file = strat["template_brain_mask_for_func_preproc"] workflow.connect( - node, out_file, - ants_reg_func_mni, 'inputspec.reference_mask' + node, out_file, ants_reg_func_mni, "inputspec.reference_mask" ) # pass the reference mask file - node, out_file = strat['functional_brain_mask'] - workflow.connect( - node, out_file, - ants_reg_func_mni, 'inputspec.moving_mask' - ) + node, out_file = strat["functional_brain_mask"] + workflow.connect(node, out_file, ants_reg_func_mni, "inputspec.moving_mask") - ants_reg_func_mni.inputs.inputspec.ants_para = \ - c.anatomical_preproc['registration_workflow']['registration'][ - 'ANTs']['T1_registration'] + ants_reg_func_mni.inputs.inputspec.ants_para = c.anatomical_preproc[ + "registration_workflow" + 
]["registration"]["ANTs"]["T1_registration"] ants_reg_func_mni.inputs.inputspec.fixed_image_mask = None strat.append_name(ants_reg_func_mni.name) - strat.update_resource_pool({ - 'registration_method': 'ANTS', - 'ants_initial_xfm': ( - ants_reg_func_mni, 'outputspec.ants_initial_xfm'), - 'ants_rigid_xfm': ( - ants_reg_func_mni, 'outputspec.ants_rigid_xfm'), - 'ants_affine_xfm': ( - ants_reg_func_mni, 'outputspec.ants_affine_xfm'), - 'func_longitudinal_to_mni_nonlinear_xfm': ( - ants_reg_func_mni, 'outputspec.warp_field'), - 'mni_to_func_longitudinal_nonlinear_xfm': ( - ants_reg_func_mni, 'outputspec.inverse_warp_field'), - 'func_longitudinal_to_mni_ants_composite_xfm': ( - ants_reg_func_mni, 'outputspec.composite_transform'), - 'func_longitudinal_template_to_standard': ( - ants_reg_func_mni, 'outputspec.normalized_output_brain') - }) + strat.update_resource_pool( + { + "registration_method": "ANTS", + "ants_initial_xfm": ( + ants_reg_func_mni, + "outputspec.ants_initial_xfm", + ), + "ants_rigid_xfm": (ants_reg_func_mni, "outputspec.ants_rigid_xfm"), + "ants_affine_xfm": ( + ants_reg_func_mni, + "outputspec.ants_affine_xfm", + ), + "func_longitudinal_to_mni_nonlinear_xfm": ( + ants_reg_func_mni, + "outputspec.warp_field", + ), + "mni_to_func_longitudinal_nonlinear_xfm": ( + ants_reg_func_mni, + "outputspec.inverse_warp_field", + ), + "func_longitudinal_to_mni_ants_composite_xfm": ( + ants_reg_func_mni, + "outputspec.composite_transform", + ), + "func_longitudinal_template_to_standard": ( + ants_reg_func_mni, + "outputspec.normalized_output_brain", + ), + } + ) strat_list += new_strat_list - ''' + """ # Func -> T1 Registration (Initial Linear Reg) workflow, strat_list, diff_complete = connect_func_to_anat_init_reg(workflow, strat_list, c) @@ -1075,13 +1113,13 @@ def register_func_longitudinal_template_to_standard( # Func -> T1/EPI Template workflow, strat_list = connect_func_to_template_reg(workflow, strat_list, c) - ''' + """ return workflow, strat_list def func_longitudinal_template_wf(subject_id, strat_list, config): - ''' + """ Parameters ---------- subject_id : string @@ -1094,74 +1132,106 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): Returns ------- None - ''' - - workflow_name = 'func_longitudinal_template_' + str(subject_id) + """ + workflow_name = "func_longitudinal_template_" + str(subject_id) workflow = pe.Workflow(name=workflow_name) - workflow.base_dir = config.pipeline_setup['working_directory']['path'] - workflow.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': os.path.abspath( - config.pipeline_setup['crash_directory']['path']) + workflow.base_dir = config.pipeline_setup["working_directory"]["path"] + workflow.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": os.path.abspath( + config.pipeline_setup["crash_directory"]["path"] + ), } # strat_nodes_list = strat_list['func_default'] strat_init = Strategy() templates_for_resampling = [ - (config.resolution_for_func_preproc, - config.template_brain_only_for_func, - 'template_brain_for_func_preproc', 'resolution_for_func_preproc'), - (config.resolution_for_func_preproc, config.template_skull_for_func, - 'template_skull_for_func_preproc', 'resolution_for_func_preproc'), - (config.resolution_for_func_preproc, config.ref_mask_for_func, - 'template_ref_mask', 'resolution_for_func_preproc'), + ( + config.resolution_for_func_preproc, + config.template_brain_only_for_func, + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + 
config.resolution_for_func_preproc, + config.template_skull_for_func, + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_preproc, + config.ref_mask_for_func, + "template_ref_mask", + "resolution_for_func_preproc", + ), # TODO check float resolution - (config.resolution_for_func_preproc, - config.functional_registration['2-func_registration_to_template'][ - 'target_template']['EPI_template']['template_epi'], - 'template_epi', 'resolution_for_func_preproc'), - (config.resolution_for_func_derivative, - config.functional_registration['2-func_registration_to_template'][ - 'target_template']['EPI_template']['template_epi'], - 'template_epi_derivative', 'resolution_for_func_derivative'), - (config.resolution_for_func_derivative, - config.template_brain_only_for_func, - 'template_brain_for_func_derivative', 'resolution_for_func_preproc'), ( - config.resolution_for_func_derivative, config.template_skull_for_func, - 'template_skull_for_func_derivative', 'resolution_for_func_preproc'), + config.resolution_for_func_preproc, + config.functional_registration["2-func_registration_to_template"][ + "target_template" + ]["EPI_template"]["template_epi"], + "template_epi", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_derivative, + config.functional_registration["2-func_registration_to_template"][ + "target_template" + ]["EPI_template"]["template_epi"], + "template_epi_derivative", + "resolution_for_func_derivative", + ), + ( + config.resolution_for_func_derivative, + config.template_brain_only_for_func, + "template_brain_for_func_derivative", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_derivative, + config.template_skull_for_func, + "template_skull_for_func_derivative", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function( - input_names=['resolution', 'template', 'template_name', 'tag'], - output_names=['resampled_template'], - function=resolve_resolution, - as_module=True), - name='resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat_init.update_resource_pool({ - template_name: (resampled_template, 'resampled_template') - }) + strat_init.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) merge_func_preproc_node = pe.Node( - Function(input_names=['working_directory'], - output_names=['brain_list', 'skull_list'], - function=merge_func_preproc, - as_module=True), - name='merge_func_preproc') + Function( + input_names=["working_directory"], + output_names=["brain_list", "skull_list"], + function=merge_func_preproc, + as_module=True, + ), + name="merge_func_preproc", + ) - merge_func_preproc_node.inputs.working_directory = \ - config.pipeline_setup['working_directory']['path'] + merge_func_preproc_node.inputs.working_directory = config.pipeline_setup[ + "working_directory" + ]["path"] template_node = subject_specific_template( - workflow_name='subject_specific_func_template_' + subject_id + workflow_name="subject_specific_func_template_" + subject_id ) 
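Both the resampling loop and the merge_func_preproc node above use the file's recurring idiom: wrap a plain Python function in a Nipype Function node so its return value becomes a connectable output. One iteration of the templates_for_resampling loop, spelled out as a sketch; the resolution and template path values are hypothetical:

    # Sketch: one iteration of the templates_for_resampling loop.
    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.utils.datasource import resolve_resolution
    from CPAC.utils.interfaces.function import Function

    resampled_template = pe.Node(
        Function(
            input_names=["resolution", "template", "template_name", "tag"],
            output_names=["resampled_template"],
            function=resolve_resolution,  # resamples the template to the target resolution
            as_module=True,
        ),
        name="resampled_template_brain_for_func_preproc",
    )
    resampled_template.inputs.resolution = "3mm"                        # hypothetical
    resampled_template.inputs.template = "/templates/T1w_brain.nii.gz"  # hypothetical
    resampled_template.inputs.template_name = "template_brain_for_func_preproc"
    resampled_template.inputs.tag = "resolution_for_func_preproc"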
template_node.inputs.set( @@ -1173,20 +1243,16 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): thread_pool=config.longitudinal_template_thread_pool, ) - workflow.connect(merge_func_preproc_node, 'brain_list', - template_node, 'input_brain_list') + workflow.connect( + merge_func_preproc_node, "brain_list", template_node, "input_brain_list" + ) - workflow.connect(merge_func_preproc_node, 'skull_list', - template_node, 'input_skull_list') + workflow.connect( + merge_func_preproc_node, "skull_list", template_node, "input_skull_list" + ) workflow, strat_list = register_func_longitudinal_template_to_standard( - template_node, - config, - workflow, - strat_init, - 'default' + template_node, config, workflow, strat_init, "default" ) workflow.run() - - return diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 09be47823a..3b7451d8b8 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -17,52 +17,58 @@ import ast import copy import hashlib -import json from itertools import chain +import json import logging import os import re -from typing import Any, Optional, Union +from typing import Optional, Union import warnings +from nipype import config # pylint: disable=wrong-import-order +from nipype.interfaces.utility import Rename # pylint: disable=wrong-import-order -from CPAC.pipeline import \ - nipype_pipeline_engine as pe # pylint: disable=ungrouped-imports -from nipype import config, logging # pylint: disable=wrong-import-order -from CPAC.pipeline.nodeblock import NodeBlockFunction # pylint: disable=ungrouped-imports -from nipype.interfaces.utility import \ - Rename # pylint: disable=wrong-import-order from CPAC.image_utils.spatial_smoothing import spatial_smoothing -from CPAC.image_utils.statistical_transforms import z_score_standardize, \ - fisher_z_score_standardize +from CPAC.image_utils.statistical_transforms import ( + fisher_z_score_standardize, + z_score_standardize, +) +from CPAC.pipeline import ( + nipype_pipeline_engine as pe, # pylint: disable=ungrouped-imports +) from CPAC.pipeline.check_outputs import ExpectedOutputs +from CPAC.pipeline.nodeblock import ( + NodeBlockFunction, # pylint: disable=ungrouped-imports +) from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set from CPAC.registration.registration import transform_derivative +from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename from CPAC.utils.datasource import ( create_anat_datasource, create_func_datasource, - ingress_func_metadata, create_general_datasource, - resolve_resolution + ingress_func_metadata, + resolve_resolution, ) -from CPAC.utils.interfaces.function import Function from CPAC.utils.interfaces.datasink import DataSink -from CPAC.utils.monitoring import getLogger, LOGTAIL, \ - WARNING_FREESURFER_OFF_WITH_DATA +from CPAC.utils.interfaces.function import Function +from CPAC.utils.monitoring import getLogger, LOGTAIL, WARNING_FREESURFER_OFF_WITH_DATA from CPAC.utils.outputs import Outputs from CPAC.utils.typing import LIST_OR_STR, TUPLE -from CPAC.utils.utils import check_prov_for_regtool, \ - create_id_string, get_last_prov_entry, read_json, write_output_json - -from CPAC.resources.templates.lookup_table import lookup_identifier +from CPAC.utils.utils import ( + check_prov_for_regtool, + create_id_string, + get_last_prov_entry, + read_json, + write_output_json, +) -logger = getLogger('nipype.workflow') +logger = getLogger("nipype.workflow") class ResourcePool: def __init__(self, 
rpool=None, name=None, cfg=None, pipe_list=None): - if not rpool: self.rpool = {} else: @@ -78,57 +84,71 @@ def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): if cfg: self.cfg = cfg - self.logdir = cfg.pipeline_setup['log_directory']['path'] - - self.num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] - self.num_ants_cores = cfg.pipeline_setup['system_config'][ - 'num_ants_threads'] - - self.ants_interp = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - self.fsl_interp = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] - - self.func_reg = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'run'] - - self.run_smoothing = 'smoothed' in cfg.post_processing[ - 'spatial_smoothing']['output'] - self.smoothing_bool = cfg.post_processing['spatial_smoothing']['run'] - self.run_zscoring = 'z-scored' in cfg.post_processing[ - 'z-scoring']['output'] - self.zscoring_bool = cfg.post_processing['z-scoring']['run'] - self.fwhm = cfg.post_processing['spatial_smoothing']['fwhm'] - self.smooth_opts = cfg.post_processing['spatial_smoothing'][ - 'smoothing_method'] - - self.xfm = ['alff', 'desc-sm_alff', 'desc-zstd_alff', - 'desc-sm-zstd_alff', - 'falff', 'desc-sm_falff', 'desc-zstd_falff', - 'desc-sm-zstd_falff', - 'reho', 'desc-sm_reho', 'desc-zstd_reho', - 'desc-sm-zstd_reho'] + self.logdir = cfg.pipeline_setup["log_directory"]["path"] + + self.num_cpus = cfg.pipeline_setup["system_config"][ + "max_cores_per_participant" + ] + self.num_ants_cores = cfg.pipeline_setup["system_config"][ + "num_ants_threads" + ] + + self.ants_interp = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["ANTs_pipelines"]["interpolation"] + self.fsl_interp = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["FNIRT_pipelines"]["interpolation"] + + self.func_reg = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["run"] + + self.run_smoothing = ( + "smoothed" in cfg.post_processing["spatial_smoothing"]["output"] + ) + self.smoothing_bool = cfg.post_processing["spatial_smoothing"]["run"] + self.run_zscoring = "z-scored" in cfg.post_processing["z-scoring"]["output"] + self.zscoring_bool = cfg.post_processing["z-scoring"]["run"] + self.fwhm = cfg.post_processing["spatial_smoothing"]["fwhm"] + self.smooth_opts = cfg.post_processing["spatial_smoothing"][ + "smoothing_method" + ] + + self.xfm = [ + "alff", + "desc-sm_alff", + "desc-zstd_alff", + "desc-sm-zstd_alff", + "falff", + "desc-sm_falff", + "desc-zstd_falff", + "desc-sm-zstd_falff", + "reho", + "desc-sm_reho", + "desc-zstd_reho", + "desc-sm-zstd_reho", + ] def __repr__(self) -> str: - params = [f"{param}={getattr(self, param)}" for param in - ["rpool", "name", "cfg", "pipe_list"] if - getattr(self, param, None) is not None] + params = [ + f"{param}={getattr(self, param)}" + for param in ["rpool", "name", "cfg", "pipe_list"] + if getattr(self, param, None) is not None + ] return f'ResourcePool({", ".join(params)})' def __str__(self) -> str: if self.name: - return f'ResourcePool({self.name}): {list(self.rpool)}' - return f'ResourcePool: {list(self.rpool)}' + return f"ResourcePool({self.name}): {list(self.rpool)}" + return f"ResourcePool: {list(self.rpool)}" def append_name(self, name): self.name.append(name) - def 
back_propogate_template_name(self, wf, resource_idx: str, json_info: dict, - id_string: 'pe.Node') -> None: + def back_propogate_template_name( + self, wf, resource_idx: str, json_info: dict, id_string: "pe.Node" + ) -> None: """Find and apply the template name from a resource's provenance Parameters @@ -143,27 +163,29 @@ def back_propogate_template_name(self, wf, resource_idx: str, json_info: dict, ------- None """ - if ('template' in resource_idx and self.check_rpool('derivatives-dir')): - if self.check_rpool('template'): - node, out = self.get_data('template') - wf.connect(node, out, id_string, 'template_desc') - elif 'Template' in json_info: - id_string.inputs.template_desc = json_info['Template'] - elif ('template' in resource_idx and - len(json_info.get('CpacProvenance', [])) > 1): - for resource in source_set(json_info['CpacProvenance']): - source, value = resource.split(':', 1) - if value.startswith('template_' - ) and source != 'FSL-AFNI-bold-ref': + if "template" in resource_idx and self.check_rpool("derivatives-dir"): + if self.check_rpool("template"): + node, out = self.get_data("template") + wf.connect(node, out, id_string, "template_desc") + elif "Template" in json_info: + id_string.inputs.template_desc = json_info["Template"] + elif ( + "template" in resource_idx and len(json_info.get("CpacProvenance", [])) > 1 + ): + for resource in source_set(json_info["CpacProvenance"]): + source, value = resource.split(":", 1) + if value.startswith("template_") and source != "FSL-AFNI-bold-ref": # 'FSL-AFNI-bold-ref' is currently allowed to be in # a different space, so don't use it as the space for # descendents try: - anscestor_json = list(self.rpool.get(source).items() - )[0][1].get('json', {}) - if 'Description' in anscestor_json: + anscestor_json = list(self.rpool.get(source).items())[0][1].get( + "json", {} + ) + if "Description" in anscestor_json: id_string.inputs.template_desc = anscestor_json[ - 'Description'] + "Description" + ] return except (IndexError, KeyError): pass @@ -196,17 +218,19 @@ def get_resources(self): return self.rpool.keys() def copy_rpool(self): - return ResourcePool(rpool=copy.deepcopy(self.get_entire_rpool()), - name=self.name, - cfg=self.cfg, - pipe_list=copy.deepcopy(self.pipe_list)) + return ResourcePool( + rpool=copy.deepcopy(self.get_entire_rpool()), + name=self.name, + cfg=self.cfg, + pipe_list=copy.deepcopy(self.pipe_list), + ) @staticmethod def get_raw_label(resource: str) -> str: """Removes ``desc-*`` label""" - for tag in resource.split('_'): - if 'desc-' in tag: - resource = resource.replace(f'{tag}_', '') + for tag in resource.split("_"): + if "desc-" in tag: + resource = resource.replace(f"{tag}_", "") break return resource @@ -214,32 +238,35 @@ def get_strat_info(self, prov, label=None, logdir=None): strat_info = {} for entry in prov: if isinstance(entry, list): - strat_info[entry[-1].split(':')[0]] = entry + strat_info[entry[-1].split(":")[0]] = entry elif isinstance(entry, str): - strat_info[entry.split(':')[0]] = entry.split(':')[1] + strat_info[entry.split(":")[0]] = entry.split(":")[1] if label: if not logdir: logdir = self.logdir - print(f'\n\nPrinting out strategy info for {label} in {logdir}\n') - write_output_json(strat_info, f'{label}_strat_info', - indent=4, basedir=logdir) + print(f"\n\nPrinting out strategy info for {label} in {logdir}\n") + write_output_json( + strat_info, f"{label}_strat_info", indent=4, basedir=logdir + ) def set_json_info(self, resource, pipe_idx, key, val): - #TODO: actually should probably be able to 
inititialize resource/pipe_idx + # TODO: actually should probably be able to inititialize resource/pipe_idx if pipe_idx not in self.rpool[resource]: - raise Exception('\n[!] DEV: The pipeline/strat ID does not exist ' - f'in the resource pool.\nResource: {resource}' - f'Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n') + raise Exception( + "\n[!] DEV: The pipeline/strat ID does not exist " + f"in the resource pool.\nResource: {resource}" + f"Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n" + ) else: - if 'json' not in self.rpool[resource][pipe_idx]: - self.rpool[resource][pipe_idx]['json'] = {} - self.rpool[resource][pipe_idx]['json'][key] = val + if "json" not in self.rpool[resource][pipe_idx]: + self.rpool[resource][pipe_idx]["json"] = {} + self.rpool[resource][pipe_idx]["json"][key] = val def get_json_info(self, resource, pipe_idx, key): - #TODO: key checks + # TODO: key checks if not pipe_idx: - for pipe_idx, val in self.rpool[resource].items(): - return val['json'][key] + for pipe_idx, val in self.rpool[resource].items(): + return val["json"][key] return self.rpool[resource][pipe_idx][key] @staticmethod @@ -251,97 +278,127 @@ def get_resource_from_prov(prov): if not len(prov): return None if isinstance(prov[-1], list): - return prov[-1][-1].split(':')[0] + return prov[-1][-1].split(":")[0] elif isinstance(prov[-1], str): - return prov[-1].split(':')[0] + return prov[-1].split(":")[0] def regressor_dct(self, cfg) -> dict: """Returns the regressor dictionary for the current strategy if - one exists. Raises KeyError otherwise.""" + one exists. Raises KeyError otherwise. + """ # pylint: disable=attribute-defined-outside-init - if hasattr(self, '_regressor_dct'): # memoized + if hasattr(self, "_regressor_dct"): # memoized # pylint: disable=access-member-before-definition return self._regressor_dct - key_error = KeyError("[!] No regressors in resource pool. \n\n" - "Try turning on create_regressors or " - "ingress_regressors.") - _nr = cfg['nuisance_corrections', '2-nuisance_regression'] - - if not hasattr(self, 'timeseries'): - if _nr['Regressors']: - self.regressors = {reg["Name"]: reg for reg in _nr['Regressors']} + key_error = KeyError( + "[!] No regressors in resource pool. \n\n" + "Try turning on create_regressors or " + "ingress_regressors." 
+ ) + _nr = cfg["nuisance_corrections", "2-nuisance_regression"] + + if not hasattr(self, "timeseries"): + if _nr["Regressors"]: + self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} else: self.regressors = [] - if self.check_rpool('parsed_regressors'): # ingressed regressor + if self.check_rpool("parsed_regressors"): # ingressed regressor # name regressor workflow without regressor_prov - strat_name = _nr['ingress_regressors']['Regressors']['Name'] + strat_name = _nr["ingress_regressors"]["Regressors"]["Name"] if strat_name in self.regressors: self._regressor_dct = self.regressors[strat_name] return self._regressor_dct - self.regressor_dct = _nr['ingress_regressors']['Regressors'] + self.regressor_dct = _nr["ingress_regressors"]["Regressors"] return self.regressor_dct - prov = self.get_cpac_provenance('desc-confounds_timeseries') - strat_name_components = prov[-1].split('_') - for _ in list(range(prov[-1].count('_'))): - reg_name = '_'.join(strat_name_components[-_:]) + prov = self.get_cpac_provenance("desc-confounds_timeseries") + strat_name_components = prov[-1].split("_") + for _ in list(range(prov[-1].count("_"))): + reg_name = "_".join(strat_name_components[-_:]) if reg_name in self.regressors: self._regressor_dct = self.regressors[reg_name] return self._regressor_dct raise key_error - def set_data(self, resource, node, output, json_info, pipe_idx, node_name, - fork=False, inject=False): + def set_data( + self, + resource, + node, + output, + json_info, + pipe_idx, + node_name, + fork=False, + inject=False, + ): json_info = json_info.copy() cpac_prov = [] - if 'CpacProvenance' in json_info: - cpac_prov = json_info['CpacProvenance'] + if "CpacProvenance" in json_info: + cpac_prov = json_info["CpacProvenance"] current_prov_list = list(cpac_prov) - new_prov_list = list(cpac_prov) # <---- making a copy, it was already a list + new_prov_list = list(cpac_prov) # <---- making a copy, it was already a list if not inject: - new_prov_list.append(f'{resource}:{node_name}') + new_prov_list.append(f"{resource}:{node_name}") try: res, new_pipe_idx = self.generate_prov_string(new_prov_list) except IndexError: - raise IndexError(f'\n\nThe set_data() call for {resource} has no ' - 'provenance information and should not be an ' - 'injection.') + raise IndexError( + f"\n\nThe set_data() call for {resource} has no " + "provenance information and should not be an " + "injection." 
+ ) if not json_info: - json_info = {'RawSources': [resource]} # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() - json_info['CpacProvenance'] = new_prov_list + json_info = { + "RawSources": [resource] + } # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + json_info["CpacProvenance"] = new_prov_list if resource not in self.rpool.keys(): self.rpool[resource] = {} - else: - if not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind - search = False - if self.get_resource_from_prov(current_prov_list) == resource: - pipe_idx = self.generate_prov_string(current_prov_list)[1] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION - if pipe_idx not in self.rpool[resource].keys(): - search = True - else: + elif not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind + search = False + if self.get_resource_from_prov(current_prov_list) == resource: + pipe_idx = self.generate_prov_string(current_prov_list)[ + 1 + ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + if pipe_idx not in self.rpool[resource].keys(): search = True - if search: - for idx in current_prov_list: - if self.get_resource_from_prov(idx) == resource: - if isinstance(idx, list): - pipe_idx = self.generate_prov_string(idx)[1] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION - elif isinstance(idx, str): - pipe_idx = idx - break - if pipe_idx in self.rpool[resource].keys(): # <--- in case the resource name is now new, and not the original - del self.rpool[resource][pipe_idx] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + else: + search = True + if search: + for idx in current_prov_list: + if self.get_resource_from_prov(idx) == resource: + if isinstance(idx, list): + pipe_idx = self.generate_prov_string( + idx + )[ + 1 + ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + elif isinstance(idx, str): + pipe_idx = idx + break + if ( + pipe_idx in self.rpool[resource].keys() + ): # <--- in case the resource name is now new, and not the original + del self.rpool[ + resource + ][ + pipe_idx + ] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) if new_pipe_idx not in self.rpool[resource]: self.rpool[resource][new_pipe_idx] = {} if new_pipe_idx not in self.pipe_list: self.pipe_list.append(new_pipe_idx) - self.rpool[resource][new_pipe_idx]['data'] = (node, output) - self.rpool[resource][new_pipe_idx]['json'] = json_info + self.rpool[resource][new_pipe_idx]["data"] = (node, output) + self.rpool[resource][new_pipe_idx]["json"] = json_info - def get(self, resource: LIST_OR_STR, pipe_idx: Optional[str] = None, - report_fetched: Optional[bool] = False, - optional: Optional[bool] = False) -> Union[ - TUPLE[Optional[dict], Optional[str]], Optional[dict]]: + def get( + self, + resource: LIST_OR_STR, + pipe_idx: Optional[str] = None, + report_fetched: Optional[bool] = False, + optional: Optional[bool] = False, + ) -> Union[TUPLE[Optional[dict], Optional[str]], Optional[dict]]: # NOTE!!! # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} # BUT, if this is a sub rpool (i.e. 
a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT @@ -373,24 +430,26 @@ def get(self, resource: LIST_OR_STR, pipe_idx: Optional[str] = None, "your C-PAC output directory.\n- If you have done these, " "and you still get this message, please let us know " "through any of our support channels at: " - "https://fcp-indi.github.io/\n") + "https://fcp-indi.github.io/\n" + ) - def get_data(self, resource, pipe_idx=None, report_fetched=False, - quick_single=False): + def get_data( + self, resource, pipe_idx=None, report_fetched=False, quick_single=False + ): if report_fetched: if pipe_idx: - connect, fetched = self.get(resource, pipe_idx=pipe_idx, - report_fetched=report_fetched) - return (connect['data'], fetched) - connect, fetched =self.get(resource, - report_fetched=report_fetched) - return (connect['data'], fetched) + connect, fetched = self.get( + resource, pipe_idx=pipe_idx, report_fetched=report_fetched + ) + return (connect["data"], fetched) + connect, fetched = self.get(resource, report_fetched=report_fetched) + return (connect["data"], fetched) elif pipe_idx: - return self.get(resource, pipe_idx=pipe_idx)['data'] + return self.get(resource, pipe_idx=pipe_idx)["data"] elif quick_single or len(self.get(resource)) == 1: for key, val in self.get(resource).items(): - return val['data'] - return self.get(resource)['data'] + return val["data"] + return self.get(resource)["data"] def copy_resource(self, resource, new_name): try: @@ -416,12 +475,14 @@ def get_json(self, resource, strat=None): # TODO: the below hits the exception if you use get_cpac_provenance on # TODO: the main rpool (i.e. if strat=None) - if 'json' in resource_strat_dct: - strat_json = resource_strat_dct['json'] + if "json" in resource_strat_dct: + strat_json = resource_strat_dct["json"] else: - raise Exception('\n[!] Developer info: the JSON ' - f'information for {resource} and {strat} ' - f'is incomplete.\n') + raise Exception( + "\n[!] Developer info: the JSON " + f"information for {resource} and {strat} " + f"is incomplete.\n" + ) return strat_json def get_cpac_provenance(self, resource, strat=None): @@ -434,7 +495,7 @@ def get_cpac_provenance(self, resource, strat=None): except KeyError: continue json_data = self.get_json(resource, strat) - return json_data['CpacProvenance'] + return json_data["CpacProvenance"] @staticmethod def generate_prov_string(prov): @@ -442,17 +503,21 @@ def generate_prov_string(prov): # MULTIPLE PRECEDING RESOURCES (or single, if just one) # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) if not isinstance(prov, list): - raise Exception('\n[!] Developer info: the CpacProvenance ' - f'entry for {prov} has to be a list.\n') + raise Exception( + "\n[!] Developer info: the CpacProvenance " + f"entry for {prov} has to be a list.\n" + ) last_entry = get_last_prov_entry(prov) - resource = last_entry.split(':')[0] + resource = last_entry.split(":")[0] return (resource, str(prov)) @staticmethod def generate_prov_list(prov_str): if not isinstance(prov_str, str): - raise Exception('\n[!] Developer info: the CpacProvenance ' - f'entry for {str(prov_str)} has to be a string.\n') + raise Exception( + "\n[!] 
Developer info: the CpacProvenance " + f"entry for {prov_str!s} has to be a string.\n" + ) return ast.literal_eval(prov_str) @staticmethod @@ -464,15 +529,15 @@ def get_resource_strats_from_prov(prov): # {rpool entry}: {that entry's provenance} resource_strat_dct = {} if isinstance(prov, str): - resource = prov.split(':')[0] + resource = prov.split(":")[0] resource_strat_dct[resource] = prov else: for spot, entry in enumerate(prov): if isinstance(entry, list): - resource = entry[-1].split(':')[0] + resource = entry[-1].split(":")[0] resource_strat_dct[resource] = entry elif isinstance(entry, str): - resource = entry.split(':')[0] + resource = entry.split(":")[0] resource_strat_dct[resource] = entry return resource_strat_dct @@ -489,7 +554,6 @@ def flatten_prov(self, prov): return flat_prov def get_strats(self, resources, debug=False): - # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS # TODO: (and it doesn't have to be) @@ -498,16 +562,16 @@ def get_strats(self, resources, debug=False): linked_resources = [] resource_list = [] if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('\nresources: %s', resources) + verbose_logger = getLogger("engine") + verbose_logger.debug("\nresources: %s", resources) for resource in resources: # grab the linked-input tuples if isinstance(resource, tuple): linked = [] for label in list(resource): - rp_dct, fetched_resource = self.get(label, - report_fetched=True, - optional=True) + rp_dct, fetched_resource = self.get( + label, report_fetched=True, optional=True + ) if not rp_dct: continue linked.append(fetched_resource) @@ -522,43 +586,45 @@ def get_strats(self, resources, debug=False): variant_pool = {} len_inputs = len(resource_list) if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('linked_resources: %s', - linked_resources) - verbose_logger.debug('resource_list: %s', resource_list) + verbose_logger = getLogger("engine") + verbose_logger.debug("linked_resources: %s", linked_resources) + verbose_logger.debug("resource_list: %s", resource_list) for resource in resource_list: - rp_dct, fetched_resource = self.get(resource, - report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath - optional=True) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + rp_dct, fetched_resource = self.get( + resource, + report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath + optional=True, + ) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be if not rp_dct: len_inputs -= 1 continue sub_pool = [] if debug: - verbose_logger.debug('len(rp_dct): %s\n', len(rp_dct)) + verbose_logger.debug("len(rp_dct): %s\n", len(rp_dct)) for strat in rp_dct.keys(): json_info = self.get_json(fetched_resource, strat) - cpac_prov = json_info['CpacProvenance'] + cpac_prov = json_info["CpacProvenance"] sub_pool.append(cpac_prov) if fetched_resource not in variant_pool: variant_pool[fetched_resource] = [] - if 'CpacVariant' in json_info: - for key, val in json_info['CpacVariant'].items(): + if "CpacVariant" in json_info: + for key, val in json_info["CpacVariant"].items(): if val not in variant_pool[fetched_resource]: variant_pool[fetched_resource] += val - 
variant_pool[fetched_resource].append( - f'NO-{val[0]}') + variant_pool[fetched_resource].append(f"NO-{val[0]}") if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('%s sub_pool: %s\n', resource, sub_pool) + verbose_logger = getLogger("engine") + verbose_logger.debug("%s sub_pool: %s\n", resource, sub_pool) total_pool.append(sub_pool) if not total_pool: - raise LookupError('\n\n[!] C-PAC says: None of the listed ' - 'resources in the node block being connected ' - 'exist in the resource pool.\n\nResources:\n' - '%s\n\n' % resource_list) + raise LookupError( + "\n\n[!] C-PAC says: None of the listed " + "resources in the node block being connected " + "exist in the resource pool.\n\nResources:\n" + "%s\n\n" % resource_list + ) # TODO: right now total_pool is: # TODO: [[[T1w:anat_ingress, desc-preproc_T1w:anatomical_init, desc-preproc_T1w:acpc_alignment], [T1w:anat_ingress,desc-preproc_T1w:anatomical_init]], @@ -576,7 +642,7 @@ def get_strats(self, resources, debug=False): new_strats = {} # get rid of duplicates - TODO: refactor .product - strat_str_list = [] + strat_str_list = [] strat_list_list = [] for strat_tuple in strats: strat_list = list(copy.deepcopy(strat_tuple)) @@ -586,18 +652,14 @@ def get_strats(self, resources, debug=False): strat_list_list.append(strat_list) if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('len(strat_list_list): %s\n', - len(strat_list_list)) + verbose_logger = getLogger("engine") + verbose_logger.debug("len(strat_list_list): %s\n", len(strat_list_list)) for strat_list in strat_list_list: - json_dct = {} for strat in strat_list: # strat is a prov list for a single resource/input - strat_resource, strat_idx = \ - self.generate_prov_string(strat) - strat_json = self.get_json(strat_resource, - strat=strat_idx) + strat_resource, strat_idx = self.generate_prov_string(strat) + strat_json = self.get_json(strat_resource, strat=strat_idx) json_dct[strat_resource] = strat_json drop = False @@ -613,38 +675,38 @@ def get_strats(self, resources, debug=False): if xlabel == ylabel: continue yjson = copy.deepcopy(json_dct[ylabel]) - - if 'CpacVariant' not in xjson: - xjson['CpacVariant'] = {} - if 'CpacVariant' not in yjson: - yjson['CpacVariant'] = {} - + + if "CpacVariant" not in xjson: + xjson["CpacVariant"] = {} + if "CpacVariant" not in yjson: + yjson["CpacVariant"] = {} + current_strat = [] - for key, val in xjson['CpacVariant'].items(): + for key, val in xjson["CpacVariant"].items(): if isinstance(val, list): current_strat.append(val[0]) else: current_strat.append(val) current_spread = list(set(variant_pool[xlabel])) for spread_label in current_spread: - if 'NO-' in spread_label: + if "NO-" in spread_label: continue if spread_label not in current_strat: - current_strat.append(f'NO-{spread_label}') - + current_strat.append(f"NO-{spread_label}") + other_strat = [] - for key, val in yjson['CpacVariant'].items(): + for key, val in yjson["CpacVariant"].items(): if isinstance(val, list): other_strat.append(val[0]) else: other_strat.append(val) other_spread = list(set(variant_pool[ylabel])) for spread_label in other_spread: - if 'NO-' in spread_label: + if "NO-" in spread_label: continue if spread_label not in other_strat: - other_strat.append(f'NO-{spread_label}') - + other_strat.append(f"NO-{spread_label}") + for variant in current_spread: in_current_strat = False in_other_strat = False @@ -671,7 +733,7 @@ def get_strats(self, resources, debug=False): if in_other_spread: if not in_current_strat: drop = True - break + break if 
drop: break if drop: @@ -680,62 +742,84 @@ def get_strats(self, resources, debug=False): # make the merged strat label from the multiple inputs # strat_list is actually the merged CpacProvenance lists pipe_idx = str(strat_list) - new_strats[pipe_idx] = ResourcePool() # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! - + new_strats[pipe_idx] = ( + ResourcePool() + ) # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! + # placing JSON info at one level higher only for copy convenience - new_strats[pipe_idx].rpool['json'] = {} - new_strats[pipe_idx].rpool['json']['subjson'] = {} - new_strats[pipe_idx].rpool['json']['CpacProvenance'] = strat_list + new_strats[pipe_idx].rpool["json"] = {} + new_strats[pipe_idx].rpool["json"]["subjson"] = {} + new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = strat_list # now just invert resource:strat to strat:resource for each resource:strat for cpac_prov in strat_list: resource, strat = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][strat] # <----- remember, this is the dct of 'data' and 'json'. - new_strats[pipe_idx].rpool[resource] = resource_strat_dct # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. + resource_strat_dct = self.rpool[resource][ + strat + ] # <----- remember, this is the dct of 'data' and 'json'. + new_strats[pipe_idx].rpool[resource] = ( + resource_strat_dct # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. + ) self.pipe_list.append(pipe_idx) - if 'CpacVariant' in resource_strat_dct['json']: - if 'CpacVariant' not in new_strats[pipe_idx].rpool['json']: - new_strats[pipe_idx].rpool['json']['CpacVariant'] = {} - for younger_resource, variant_list in resource_strat_dct['json']['CpacVariant'].items(): - if younger_resource not in new_strats[pipe_idx].rpool['json']['CpacVariant']: - new_strats[pipe_idx].rpool['json']['CpacVariant'][younger_resource] = variant_list + if "CpacVariant" in resource_strat_dct["json"]: + if "CpacVariant" not in new_strats[pipe_idx].rpool["json"]: + new_strats[pipe_idx].rpool["json"]["CpacVariant"] = {} + for younger_resource, variant_list in resource_strat_dct[ + "json" + ]["CpacVariant"].items(): + if ( + younger_resource + not in new_strats[pipe_idx].rpool["json"]["CpacVariant"] + ): + new_strats[pipe_idx].rpool["json"]["CpacVariant"][ + younger_resource + ] = variant_list # preserve each input's JSON info also - data_type = resource.split('_')[-1] - if data_type not in new_strats[pipe_idx].rpool['json']['subjson']: - new_strats[pipe_idx].rpool['json']['subjson'][data_type] = {} - new_strats[pipe_idx].rpool['json']['subjson'][data_type].update(copy.deepcopy(resource_strat_dct['json'])) + data_type = resource.split("_")[-1] + if data_type not in new_strats[pipe_idx].rpool["json"]["subjson"]: + new_strats[pipe_idx].rpool["json"]["subjson"][data_type] = {} + new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update( + copy.deepcopy(resource_strat_dct["json"]) + ) else: new_strats = {} - for resource_strat_list in total_pool: # total_pool will have only one list of strats, for the one input - for cpac_prov in resource_strat_list: # <------- cpac_prov here doesn't need to be modified, because it's not merging with other inputs + for resource_strat_list in ( + total_pool + ): # total_pool will have only one list of strats, for the one input + for cpac_prov in resource_strat_list: # <------- cpac_prov here 
doesn't need to be modified, because it's not merging with other inputs resource, pipe_idx = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][pipe_idx] # <----- remember, this is the dct of 'data' and 'json'. - new_strats[pipe_idx] = ResourcePool(rpool={resource: resource_strat_dct}) # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! + resource_strat_dct = self.rpool[resource][ + pipe_idx + ] # <----- remember, this is the dct of 'data' and 'json'. + new_strats[pipe_idx] = ResourcePool( + rpool={resource: resource_strat_dct} + ) # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! # placing JSON info at one level higher only for copy convenience - new_strats[pipe_idx].rpool['json'] = resource_strat_dct['json'] # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) - new_strats[pipe_idx].rpool['json']['subjson'] = {} - new_strats[pipe_idx].rpool['json']['CpacProvenance'] = cpac_prov + new_strats[pipe_idx].rpool["json"] = resource_strat_dct[ + "json" + ] # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) + new_strats[pipe_idx].rpool["json"]["subjson"] = {} + new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = cpac_prov # preserve each input's JSON info also - data_type = resource.split('_')[-1] - if data_type not in new_strats[pipe_idx].rpool['json']['subjson']: - new_strats[pipe_idx].rpool['json']['subjson'][data_type] = {} - new_strats[pipe_idx].rpool['json']['subjson'][data_type].update(copy.deepcopy(resource_strat_dct['json'])) + data_type = resource.split("_")[-1] + if data_type not in new_strats[pipe_idx].rpool["json"]["subjson"]: + new_strats[pipe_idx].rpool["json"]["subjson"][data_type] = {} + new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update( + copy.deepcopy(resource_strat_dct["json"]) + ) return new_strats - def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, - pipe_x): - + def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): if label in self.xfm: - json_info = dict(json_info) # get the bold-to-template transform from the current strat_pool # info xfm_idx = None - xfm_label = 'from-bold_to-template_mode-image_xfm' - for entry in json_info['CpacProvenance']: + xfm_label = "from-bold_to-template_mode-image_xfm" + for entry in json_info["CpacProvenance"]: if isinstance(entry, list): - if entry[-1].split(':')[0] == xfm_label: + if entry[-1].split(":")[0] == xfm_label: xfm_prov = entry xfm_idx = self.generate_prov_string(xfm_prov)[1] break @@ -746,40 +830,50 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, if not xfm_idx: xfm_info = [] for pipe_idx, entry in self.get(xfm_label).items(): - xfm_info.append((pipe_idx, entry['json']['CpacProvenance'])) + xfm_info.append((pipe_idx, entry["json"]["CpacProvenance"])) else: xfm_info = [(xfm_idx, xfm_prov)] for num, xfm_entry in enumerate(xfm_info): - xfm_idx, xfm_prov = xfm_entry reg_tool = check_prov_for_regtool(xfm_prov) - xfm = transform_derivative(f'{label}_xfm_{pipe_x}_{num}', - label, reg_tool, self.num_cpus, - self.num_ants_cores, - ants_interp=self.ants_interp, - fsl_interp=self.fsl_interp, - opt=None) - wf.connect(connection[0], connection[1], - xfm, 'inputspec.in_file') - - node, out = self.get_data("T1w-brain-template-deriv", - quick_single=True) - wf.connect(node, out, xfm, 'inputspec.reference') - - node, out = 
self.get_data('from-bold_to-template_mode-image_xfm', - pipe_idx=xfm_idx) - wf.connect(node, out, xfm, 'inputspec.transform') - - label = f'space-template_{label}' - json_info['Template'] = self.get_json_info('T1w-brain-template-deriv', - None, 'Description') - new_prov = json_info['CpacProvenance'] + xfm_prov - json_info['CpacProvenance'] = new_prov + xfm = transform_derivative( + f"{label}_xfm_{pipe_x}_{num}", + label, + reg_tool, + self.num_cpus, + self.num_ants_cores, + ants_interp=self.ants_interp, + fsl_interp=self.fsl_interp, + opt=None, + ) + wf.connect(connection[0], connection[1], xfm, "inputspec.in_file") + + node, out = self.get_data("T1w-brain-template-deriv", quick_single=True) + wf.connect(node, out, xfm, "inputspec.reference") + + node, out = self.get_data( + "from-bold_to-template_mode-image_xfm", pipe_idx=xfm_idx + ) + wf.connect(node, out, xfm, "inputspec.transform") + + label = f"space-template_{label}" + json_info["Template"] = self.get_json_info( + "T1w-brain-template-deriv", None, "Description" + ) + new_prov = json_info["CpacProvenance"] + xfm_prov + json_info["CpacProvenance"] = new_prov new_pipe_idx = self.generate_prov_string(new_prov) - self.set_data(label, xfm, 'outputspec.out_file', json_info, - new_pipe_idx, f'{label}_xfm_{num}', fork=True) + self.set_data( + label, + xfm, + "outputspec.out_file", + json_info, + new_pipe_idx, + f"{label}_xfm_{num}", + fork=True, + ) return wf @@ -793,8 +887,9 @@ def filtered_movement(self) -> bool: bool """ try: - return 'motion_estimate_filter' in str(self.get_cpac_provenance( - 'desc-movementParameters_motion')) + return "motion_estimate_filter" in str( + self.get_cpac_provenance("desc-movementParameters_motion") + ) except KeyError: # not a strat_pool or no movement parameters in strat_pool return False @@ -808,49 +903,53 @@ def filter_name(self, cfg) -> str: ------- str """ - motion_filters = cfg['functional_preproc', - 'motion_estimates_and_correction', - 'motion_estimate_filter', 'filters'] - if len(motion_filters) == 1 and cfg.switch_is_on([ - 'functional_preproc', 'motion_estimates_and_correction', - 'motion_estimate_filter', 'run'], exclusive=True + motion_filters = cfg[ + "functional_preproc", + "motion_estimates_and_correction", + "motion_estimate_filter", + "filters", + ] + if len(motion_filters) == 1 and cfg.switch_is_on( + [ + "functional_preproc", + "motion_estimates_and_correction", + "motion_estimate_filter", + "run", + ], + exclusive=True, ): - return motion_filters[0]['Name'] + return motion_filters[0]["Name"] try: - key = 'motion' - sidecar = self.get_json('desc-movementParameters_motion') + key = "motion" + sidecar = self.get_json("desc-movementParameters_motion") except KeyError: sidecar = None - if sidecar is not None and 'CpacVariant' in sidecar: - if sidecar['CpacVariant'][key]: - return sidecar['CpacVariant'][key][0][::-1].split('_', - 1)[0][::-1] - return 'none' - - def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, - outs): + if sidecar is not None and "CpacVariant" in sidecar: + if sidecar["CpacVariant"][key]: + return sidecar["CpacVariant"][key][0][::-1].split("_", 1)[0][::-1] + return "none" - input_type = 'func_derivative' + def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs): + input_type = "func_derivative" post_labels = [(label, connection[0], connection[1])] - if re.match(r'(.*_)?[ed]c[bw]$', label) or re.match(r'(.*_)?lfcd[bw]$', - label): + if re.match(r"(.*_)?[ed]c[bw]$", label) or re.match(r"(.*_)?lfcd[bw]$", label): # suffix: 
[eigenvector or degree] centrality [binarized or weighted] # or lfcd [binarized or weighted] - mask = 'template-specification-file' - elif 'space-template' in label: - if 'space-template_res-derivative_desc-bold_mask' in self.rpool.keys(): - mask = 'space-template_res-derivative_desc-bold_mask' + mask = "template-specification-file" + elif "space-template" in label: + if "space-template_res-derivative_desc-bold_mask" in self.rpool.keys(): + mask = "space-template_res-derivative_desc-bold_mask" else: - mask = 'space-template_desc-bold_mask' + mask = "space-template_desc-bold_mask" else: - mask = 'space-bold_desc-brain_mask' + mask = "space-bold_desc-brain_mask" mask_idx = None - for entry in json_info['CpacProvenance']: + for entry in json_info["CpacProvenance"]: if isinstance(entry, list): - if entry[-1].split(':')[0] == mask: + if entry[-1].split(":")[0] == mask: mask_prov = entry mask_idx = self.generate_prov_string(mask_prov)[1] break @@ -858,96 +957,119 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, if self.smoothing_bool: if label in Outputs.to_smooth: for smooth_opt in self.smooth_opts: - - sm = spatial_smoothing(f'{label}_smooth_{smooth_opt}_' - f'{pipe_x}', - self.fwhm, input_type, smooth_opt) - wf.connect(connection[0], connection[1], - sm, 'inputspec.in_file') - node, out = self.get_data(mask, pipe_idx=mask_idx, - quick_single=mask_idx is None) - wf.connect(node, out, sm, 'inputspec.mask') - - if 'desc-' not in label: - if 'space-' in label: - for tag in label.split('_'): - if 'space-' in tag: - smlabel = label.replace(tag, - f'{tag}_desc-sm') + sm = spatial_smoothing( + f"{label}_smooth_{smooth_opt}_" f"{pipe_x}", + self.fwhm, + input_type, + smooth_opt, + ) + wf.connect(connection[0], connection[1], sm, "inputspec.in_file") + node, out = self.get_data( + mask, pipe_idx=mask_idx, quick_single=mask_idx is None + ) + wf.connect(node, out, sm, "inputspec.mask") + + if "desc-" not in label: + if "space-" in label: + for tag in label.split("_"): + if "space-" in tag: + smlabel = label.replace(tag, f"{tag}_desc-sm") break else: - smlabel = f'desc-sm_{label}' + smlabel = f"desc-sm_{label}" else: - for tag in label.split('_'): - if 'desc-' in tag: - newtag = f'{tag}-sm' + for tag in label.split("_"): + if "desc-" in tag: + newtag = f"{tag}-sm" smlabel = label.replace(tag, newtag) break - post_labels.append((smlabel, sm, 'outputspec.out_file')) - - self.set_data(smlabel, sm, 'outputspec.out_file', - json_info, pipe_idx, - f'spatial_smoothing_{smooth_opt}', - fork=True) - self.set_data('fwhm', sm, 'outputspec.fwhm', json_info, - pipe_idx, f'spatial_smoothing_{smooth_opt}', - fork=True) - - if self.zscoring_bool: + post_labels.append((smlabel, sm, "outputspec.out_file")) + + self.set_data( + smlabel, + sm, + "outputspec.out_file", + json_info, + pipe_idx, + f"spatial_smoothing_{smooth_opt}", + fork=True, + ) + self.set_data( + "fwhm", + sm, + "outputspec.fwhm", + json_info, + pipe_idx, + f"spatial_smoothing_{smooth_opt}", + fork=True, + ) + + if self.zscoring_bool: for label_con_tpl in post_labels: label = label_con_tpl[0] connection = (label_con_tpl[1], label_con_tpl[2]) if label in Outputs.to_zstd: - zstd = z_score_standardize(f'{label}_zstd_{pipe_x}', - input_type) + zstd = z_score_standardize(f"{label}_zstd_{pipe_x}", input_type) - wf.connect(connection[0], connection[1], - zstd, 'inputspec.in_file') + wf.connect(connection[0], connection[1], zstd, "inputspec.in_file") node, out = self.get_data(mask, pipe_idx=mask_idx) - wf.connect(node, out, zstd, 
'inputspec.mask') + wf.connect(node, out, zstd, "inputspec.mask") - if 'desc-' not in label: - if 'space-template' in label: - new_label = label.replace('space-template', - 'space-template_desc-zstd') + if "desc-" not in label: + if "space-template" in label: + new_label = label.replace( + "space-template", "space-template_desc-zstd" + ) else: - new_label = f'desc-zstd_{label}' + new_label = f"desc-zstd_{label}" else: - for tag in label.split('_'): - if 'desc-' in tag: - newtag = f'{tag}-zstd' + for tag in label.split("_"): + if "desc-" in tag: + newtag = f"{tag}-zstd" new_label = label.replace(tag, newtag) break - post_labels.append((new_label, zstd, 'outputspec.out_file')) + post_labels.append((new_label, zstd, "outputspec.out_file")) - self.set_data(new_label, zstd, 'outputspec.out_file', - json_info, pipe_idx, f'zscore_standardize', - fork=True) + self.set_data( + new_label, + zstd, + "outputspec.out_file", + json_info, + pipe_idx, + "zscore_standardize", + fork=True, + ) elif label in Outputs.to_fisherz: + zstd = fisher_z_score_standardize( + f"{label}_zstd_{pipe_x}", label, input_type + ) - zstd = fisher_z_score_standardize(f'{label}_zstd_{pipe_x}', - label, input_type) - - wf.connect(connection[0], connection[1], - zstd, 'inputspec.correlation_file') + wf.connect( + connection[0], connection[1], zstd, "inputspec.correlation_file" + ) # if the output is 'space-template_desc-MeanSCA_correlations', we want # 'desc-MeanSCA_timeseries' - oned = label.replace('correlations', 'timeseries') + oned = label.replace("correlations", "timeseries") node, out = outs[oned] - wf.connect(node, out, zstd, 'inputspec.timeseries_oned') + wf.connect(node, out, zstd, "inputspec.timeseries_oned") - post_labels.append((new_label, zstd, 'outputspec.out_file')) + post_labels.append((new_label, zstd, "outputspec.out_file")) - self.set_data(new_label, zstd, 'outputspec.out_file', - json_info, pipe_idx, - 'fisher_zscore_standardize', - fork=True) + self.set_data( + new_label, + zstd, + "outputspec.out_file", + json_info, + pipe_idx, + "fisher_zscore_standardize", + fork=True, + ) return (wf, post_labels) @@ -960,16 +1082,15 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): if add_excl: excl += add_excl - if 'nonsmoothed' not in cfg.post_processing['spatial_smoothing'][ - 'output']: + if "nonsmoothed" not in cfg.post_processing["spatial_smoothing"]["output"]: excl += Outputs.native_nonsmooth excl += Outputs.template_nonsmooth - if 'raw' not in cfg.post_processing['z-scoring']['output']: + if "raw" not in cfg.post_processing["z-scoring"]["output"]: excl += Outputs.native_raw excl += Outputs.template_raw - if not cfg.pipeline_setup['output_directory']['write_debugging_outputs']: + if not cfg.pipeline_setup["output_directory"]["write_debugging_outputs"]: # substring_excl.append(['bold']) excl += Outputs.debugging @@ -998,45 +1119,43 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): if drop: continue - subdir = 'other' + subdir = "other" if resource in Outputs.anat: - subdir = 'anat' - #TODO: get acq- etc. + subdir = "anat" + # TODO: get acq- etc. elif resource in Outputs.func: - subdir = 'func' - #TODO: other stuff like acq- etc. + subdir = "func" + # TODO: other stuff like acq- etc. 
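In short, the block above first drops any resource matching the exclusion lists assembled from the pipeline config, then routes each survivor to an output subdirectory. A condensed, self-contained sketch of that filter-and-route step, folding the later "'other' and not all" check into one helper (the lookup collections are assumed example values standing in for the real excl list and the Outputs.anat / Outputs.func registries):

excl = ["desc-preproc_bold"]          # e.g. a debugging output excluded by config
anat_outputs = {"desc-brain_T1w"}     # stand-in for Outputs.anat
func_outputs = {"desc-preproc_bold"}  # stand-in for Outputs.func

def route(resource, write_all=False):
    """Return None to skip a resource, else the subdir it will be sunk to."""
    if resource in excl:
        return None                   # dropped before any sink nodes are built
    if resource in anat_outputs:
        return "anat"
    if resource in func_outputs:
        return "func"
    # 'other' outputs are only written when gather_pipes is called with all=True
    return "other" if write_all else None

assert route("desc-preproc_bold") is None   # excluded
assert route("desc-brain_T1w") == "anat"    # routed to the anat/ subdir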
for pipe_idx in self.rpool[resource]: unique_id = self.get_name() - part_id = unique_id.split('_')[0] - ses_id = unique_id.split('_')[1] + part_id = unique_id.split("_")[0] + ses_id = unique_id.split("_")[1] - if 'ses-' not in ses_id: + if "ses-" not in ses_id: ses_id = f"ses-{ses_id}" - out_dir = cfg.pipeline_setup['output_directory']['path'] - pipe_name = cfg.pipeline_setup['pipeline_name'] - container = os.path.join(f'pipeline_{pipe_name}', part_id, - ses_id) - filename = f'{unique_id}_{res_in_filename(self.cfg, resource)}' + out_dir = cfg.pipeline_setup["output_directory"]["path"] + pipe_name = cfg.pipeline_setup["pipeline_name"] + container = os.path.join(f"pipeline_{pipe_name}", part_id, ses_id) + filename = f"{unique_id}_{res_in_filename(self.cfg, resource)}" out_path = os.path.join(out_dir, container, subdir, filename) out_dct = { - 'unique_id': unique_id, - 'out_dir': out_dir, - 'container': container, - 'subdir': subdir, - 'filename': filename, - 'out_path': out_path + "unique_id": unique_id, + "out_dir": out_dir, + "container": container, + "subdir": subdir, + "filename": filename, + "out_path": out_path, } - self.rpool[resource][pipe_idx]['out'] = out_dct + self.rpool[resource][pipe_idx]["out"] = out_dct # TODO: have to link the pipe_idx's here. and call up 'desc-preproc_T1w' from a Sources in a json and replace. here. # TODO: can do the pipeline_description.json variants here too! for resource in self.rpool.keys(): - if resource not in Outputs.any: continue @@ -1064,30 +1183,46 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): num_variant = 0 if len(self.rpool[resource]) == 1: num_variant = "" - all_jsons = [self.rpool[resource][pipe_idx]['json'] for pipe_idx in - self.rpool[resource]] - unlabelled = set(key for json_info in all_jsons for key in - json_info.get('CpacVariant', {}).keys() if - key not in (*MOVEMENT_FILTER_KEYS, 'timeseries')) - if 'bold' in unlabelled: + all_jsons = [ + self.rpool[resource][pipe_idx]["json"] + for pipe_idx in self.rpool[resource] + ] + unlabelled = set( + key + for json_info in all_jsons + for key in json_info.get("CpacVariant", {}).keys() + if key not in (*MOVEMENT_FILTER_KEYS, "timeseries") + ) + if "bold" in unlabelled: all_bolds = list( - chain.from_iterable(json_info['CpacVariant']['bold'] for - json_info in all_jsons if - 'CpacVariant' in json_info and - 'bold' in json_info['CpacVariant'])) + chain.from_iterable( + json_info["CpacVariant"]["bold"] + for json_info in all_jsons + if "CpacVariant" in json_info + and "bold" in json_info["CpacVariant"] + ) + ) # not any(not) because all is overloaded as a parameter here - if not any(not re.match(r'apply_(phasediff|blip)_to_' - r'timeseries_separately_.*', _bold) - for _bold in all_bolds): + if not any( + not re.match( + r"apply_(phasediff|blip)_to_" r"timeseries_separately_.*", _bold + ) + for _bold in all_bolds + ): # this fork point should only result in 0 or 1 forks - unlabelled.remove('bold') + unlabelled.remove("bold") del all_bolds - all_forks = {key: set( - chain.from_iterable(json_info['CpacVariant'][key] for - json_info in all_jsons if - 'CpacVariant' in json_info and - key in json_info['CpacVariant'])) for - key in unlabelled} + all_forks = { + key: set( + chain.from_iterable( + json_info["CpacVariant"][key] + for json_info in all_jsons + if "CpacVariant" in json_info + and key in json_info["CpacVariant"] + ) + ) + for key in unlabelled + } # del all_jsons for key, forks in all_forks.items(): if len(forks) < 2: # no int suffix needed if only one fork @@ 
-1095,8 +1230,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # del all_forks for pipe_idx in self.rpool[resource]: pipe_x = self.get_pipe_number(pipe_idx) - json_info = self.rpool[resource][pipe_idx]['json'] - out_dct = self.rpool[resource][pipe_idx]['out'] + json_info = self.rpool[resource][pipe_idx]["json"] + out_dct = self.rpool[resource][pipe_idx]["out"] try: if unlabelled: @@ -1105,157 +1240,173 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): pass try: - del json_info['subjson'] + del json_info["subjson"] except KeyError: pass - if out_dct['subdir'] == 'other' and not all: + if out_dct["subdir"] == "other" and not all: continue - unique_id = out_dct['unique_id'] + unique_id = out_dct["unique_id"] resource_idx = resource if isinstance(num_variant, int): - resource_idx, out_dct = name_fork(resource_idx, cfg, - json_info, out_dct) + resource_idx, out_dct = name_fork( + resource_idx, cfg, json_info, out_dct + ) if unlabelled: - if 'desc-' in out_dct['filename']: - for key in out_dct['filename'].split('_')[::-1]: + if "desc-" in out_dct["filename"]: + for key in out_dct["filename"].split("_")[::-1]: # final `desc` entity - if key.startswith('desc-'): - out_dct['filename'] = out_dct['filename' - ].replace( - key, f'{key}-{num_variant}') + if key.startswith("desc-"): + out_dct["filename"] = out_dct["filename"].replace( + key, f"{key}-{num_variant}" + ) resource_idx = resource_idx.replace( - key, f'{key}-{num_variant}') + key, f"{key}-{num_variant}" + ) break else: - suff = resource.split('_')[-1] - newdesc_suff = f'desc-{num_variant}_{suff}' - resource_idx = resource_idx.replace(suff, - newdesc_suff) - id_string = pe.Node(Function(input_names=['cfg', 'unique_id', - 'resource', - 'scan_id', - 'template_desc', - 'atlas_id', - 'fwhm', - 'subdir', - 'extension'], - output_names=['out_filename'], - function=create_id_string), - name=f'id_string_{resource_idx}_{pipe_x}') + suff = resource.split("_")[-1] + newdesc_suff = f"desc-{num_variant}_{suff}" + resource_idx = resource_idx.replace(suff, newdesc_suff) + id_string = pe.Node( + Function( + input_names=[ + "cfg", + "unique_id", + "resource", + "scan_id", + "template_desc", + "atlas_id", + "fwhm", + "subdir", + "extension", + ], + output_names=["out_filename"], + function=create_id_string, + ), + name=f"id_string_{resource_idx}_{pipe_x}", + ) id_string.inputs.cfg = self.cfg id_string.inputs.unique_id = unique_id id_string.inputs.resource = resource_idx - id_string.inputs.subdir = out_dct['subdir'] + id_string.inputs.subdir = out_dct["subdir"] # grab the iterable scan ID - if out_dct['subdir'] == 'func': - node, out = self.rpool['scan']["['scan:func_ingress']"][ - 'data'] - wf.connect(node, out, id_string, 'scan_id') - - self.back_propogate_template_name(wf, resource_idx, json_info, - id_string) + if out_dct["subdir"] == "func": + node, out = self.rpool["scan"]["['scan:func_ingress']"]["data"] + wf.connect(node, out, id_string, "scan_id") + + self.back_propogate_template_name( + wf, resource_idx, json_info, id_string + ) # grab the FWHM if smoothed - for tag in resource.split('_'): - if 'desc-' in tag and '-sm' in tag: - fwhm_idx = pipe_idx.replace(f'{resource}:', 'fwhm:') + for tag in resource.split("_"): + if "desc-" in tag and "-sm" in tag: + fwhm_idx = pipe_idx.replace(f"{resource}:", "fwhm:") try: - node, out = self.rpool['fwhm'][fwhm_idx]['data'] - wf.connect(node, out, id_string, 'fwhm') + node, out = self.rpool["fwhm"][fwhm_idx]["data"] + wf.connect(node, out, id_string, "fwhm") 
except KeyError: # smoothing was not done for this resource in the # engine.py smoothing pass break - atlas_suffixes = ['timeseries', 'correlations', 'statmap'] + atlas_suffixes = ["timeseries", "correlations", "statmap"] # grab the iterable atlas ID atlas_id = None - if not resource.endswith('desc-confounds_timeseries'): - if resource.split('_')[-1] in atlas_suffixes: - atlas_idx = pipe_idx.replace(resource, 'atlas_name') + if not resource.endswith("desc-confounds_timeseries"): + if resource.split("_")[-1] in atlas_suffixes: + atlas_idx = pipe_idx.replace(resource, "atlas_name") # need the single quote and the colon inside the double # quotes - it's the encoded pipe_idx - #atlas_idx = new_idx.replace(f"'{temp_rsc}:", + # atlas_idx = new_idx.replace(f"'{temp_rsc}:", # "'atlas_name:") - if atlas_idx in self.rpool['atlas_name']: - node, out = self.rpool['atlas_name'][atlas_idx][ - 'data'] - wf.connect(node, out, id_string, 'atlas_id') - elif 'atlas-' in resource: - for tag in resource.split('_'): - if 'atlas-' in tag: - atlas_id = tag.replace('atlas-', '') + if atlas_idx in self.rpool["atlas_name"]: + node, out = self.rpool["atlas_name"][atlas_idx]["data"] + wf.connect(node, out, id_string, "atlas_id") + elif "atlas-" in resource: + for tag in resource.split("_"): + if "atlas-" in tag: + atlas_id = tag.replace("atlas-", "") id_string.inputs.atlas_id = atlas_id else: - warnings.warn(str( - LookupError("\n[!] No atlas ID found for " - f"{out_dct['filename']}.\n"))) - nii_name = pe.Node(Rename(), name=f'nii_{resource_idx}_' - f'{pipe_x}') + warnings.warn( + str( + LookupError( + "\n[!] No atlas ID found for " + f"{out_dct['filename']}.\n" + ) + ) + ) + nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_" f"{pipe_x}") nii_name.inputs.keep_ext = True - + if resource in Outputs.ciftis: - nii_name.inputs.keep_ext = False - id_string.inputs.extension = Outputs.ciftis[resource] + nii_name.inputs.keep_ext = False + id_string.inputs.extension = Outputs.ciftis[resource] else: - nii_name.inputs.keep_ext = True - - + nii_name.inputs.keep_ext = True + if resource in Outputs.giftis: + nii_name.inputs.keep_ext = False + id_string.inputs.extension = f"{Outputs.giftis[resource]}.gii" - nii_name.inputs.keep_ext = False - id_string.inputs.extension = f'{Outputs.giftis[resource]}.gii' - else: - nii_name.inputs.keep_ext = True - - wf.connect(id_string, 'out_filename', - nii_name, 'format_string') - - node, out = self.rpool[resource][pipe_idx]['data'] + nii_name.inputs.keep_ext = True + + wf.connect(id_string, "out_filename", nii_name, "format_string") + + node, out = self.rpool[resource][pipe_idx]["data"] try: - wf.connect(node, out, nii_name, 'in_file') + wf.connect(node, out, nii_name, "in_file") except OSError as os_error: logger.warning(os_error) continue - write_json_imports = ['import os', 'import json'] - write_json = pe.Node(Function(input_names=['json_data', - 'filename'], - output_names=['json_file'], - function=write_output_json, - imports=write_json_imports), - name=f'json_{resource_idx}_{pipe_x}') + write_json_imports = ["import os", "import json"] + write_json = pe.Node( + Function( + input_names=["json_data", "filename"], + output_names=["json_file"], + function=write_output_json, + imports=write_json_imports, + ), + name=f"json_{resource_idx}_{pipe_x}", + ) write_json.inputs.json_data = json_info - wf.connect(id_string, 'out_filename', write_json, 'filename') - ds = pe.Node(DataSink(), name=f'sinker_{resource_idx}_' - f'{pipe_x}') + wf.connect(id_string, "out_filename", write_json, "filename") 
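Each sunk output follows the same wiring pattern: a Function node composes the filename, a Rename node applies it to the data file, a second Function node writes the JSON sidecar, and everything converges on the DataSink created just below. A stripped-down Nipype sketch of that pattern (node names and the make_name helper are hypothetical stand-ins; in the real graph, Rename's in_file comes from the resource's upstream node and create_id_string builds the filename):

import nipype.pipeline.engine as pe
from nipype.interfaces.io import DataSink
from nipype.interfaces.utility import Function, Rename

def make_name(resource):
    # hypothetical stand-in for create_id_string
    return f"sub-01_ses-1_{resource}"

id_string = pe.Node(Function(input_names=["resource"],
                             output_names=["out_filename"],
                             function=make_name),
                    name="id_string_sketch")
id_string.inputs.resource = "desc-preproc_T1w"

nii_name = pe.Node(Rename(keep_ext=True), name="nii_sketch")
ds = pe.Node(DataSink(parameterization=False), name="sinker_sketch")
ds.inputs.base_directory = "/tmp/outputs"  # assumed output root

wf = pe.Workflow(name="sink_wiring_sketch")
wf.connect(id_string, "out_filename", nii_name, "format_string")
wf.connect(nii_name, "out_file", ds, "anat.@data")  # '@data' keeps files side by side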
+ ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_" f"{pipe_x}") ds.inputs.parameterization = False - ds.inputs.base_directory = out_dct['out_dir'] - ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup[ - 'Amazon-AWS']['s3_encryption'] - ds.inputs.container = out_dct['container'] - - if cfg.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials']: - ds.inputs.creds_path = cfg.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials'] - expected_outputs += (out_dct['subdir'], create_id_string( - self.cfg, unique_id, resource_idx, - template_desc=id_string.inputs.template_desc, - atlas_id=atlas_id, subdir=out_dct['subdir'])) - wf.connect(nii_name, 'out_file', - ds, f'{out_dct["subdir"]}.@data') - wf.connect(write_json, 'json_file', - ds, f'{out_dct["subdir"]}.@json') + ds.inputs.base_directory = out_dct["out_dir"] + ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup["Amazon-AWS"][ + "s3_encryption" + ] + ds.inputs.container = out_dct["container"] + + if cfg.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"]: + ds.inputs.creds_path = cfg.pipeline_setup["Amazon-AWS"][ + "aws_output_bucket_credentials" + ] + expected_outputs += ( + out_dct["subdir"], + create_id_string( + self.cfg, + unique_id, + resource_idx, + template_desc=id_string.inputs.template_desc, + atlas_id=atlas_id, + subdir=out_dct["subdir"], + ), + ) + wf.connect(nii_name, "out_file", ds, f'{out_dct["subdir"]}.@data') + wf.connect(write_json, "json_file", ds, f'{out_dct["subdir"]}.@json') outputs_logger.info(expected_outputs) def node_data(self, resource, **kwargs): - '''Factory function to create NodeData objects + """Factory function to create NodeData objects Parameters ---------- @@ -1264,7 +1415,7 @@ def node_data(self, resource, **kwargs): Returns ------- NodeData - ''' + """ return NodeData(self, resource, **kwargs) @@ -1275,8 +1426,7 @@ def __init__(self, node_block_functions, debug=False): self.node_blocks = {} - for node_block_function in node_block_functions: # <---- sets up the NodeBlock object in case you gave it a list of node blocks instead of a single one - for option forking. - + for node_block_function in node_block_functions: # <---- sets up the NodeBlock object in case you gave it a list of node blocks instead of a single one - for option forking. 
self.input_interface = [] if isinstance(node_block_function, tuple): self.input_interface = node_block_function[1] @@ -1286,9 +1436,11 @@ def __init__(self, node_block_functions, debug=False): if not isinstance(node_block_function, NodeBlockFunction): # If the object is a plain function `__name__` will be more useful then `str()` - obj_str = node_block_function.__name__ \ - if hasattr(node_block_function, '__name__') else \ - str(node_block_function) + obj_str = ( + node_block_function.__name__ + if hasattr(node_block_function, "__name__") + else str(node_block_function) + ) raise TypeError(f'Object is not a nodeblock: "{obj_str}"') name = node_block_function.name @@ -1305,37 +1457,37 @@ def __init__(self, node_block_functions, debug=False): list_tup.append(interface[1]) node_block_function.inputs.remove(orig_input) node_block_function.inputs.append(tuple(list_tup)) - else: - if orig_input == interface[0]: - node_block_function.inputs.remove(interface[0]) - node_block_function.inputs.append(interface[1]) + elif orig_input == interface[0]: + node_block_function.inputs.remove(interface[0]) + node_block_function.inputs.append(interface[1]) for key, val in node_block_function.legacy_nodeblock_dict().items(): self.node_blocks[name][key] = val - self.node_blocks[name]['block_function'] = node_block_function + self.node_blocks[name]["block_function"] = node_block_function - #TODO: fix/replace below + # TODO: fix/replace below self.outputs = {} for out in node_block_function.outputs: self.outputs[out] = None - self.options = ['base'] + self.options = ["base"] if node_block_function.outputs is not None: self.options = node_block_function.outputs - logger.info('Connecting %s...', name) + logger.info("Connecting %s...", name) if debug: - config.update_config( - {'logging': {'workflow_level': 'DEBUG'}}) + config.update_config({"logging": {"workflow_level": "DEBUG"}}) logging.update_logging(config) - logger.debug('"inputs": %s\n\t "outputs": %s%s', - node_block_function.inputs, - list(self.outputs.keys()), - f'\n\t"options": {self.options}' - if self.options != ['base'] else '') - config.update_config( - {'logging': {'workflow_level': 'INFO'}}) + logger.debug( + '"inputs": %s\n\t "outputs": %s%s', + node_block_function.inputs, + list(self.outputs.keys()), + f'\n\t"options": {self.options}' + if self.options != ["base"] + else "", + ) + config.update_config({"logging": {"workflow_level": "INFO"}}) logging.update_logging(config) def get_name(self): @@ -1343,14 +1495,16 @@ def get_name(self): def check_null(self, val): if isinstance(val, str): - val = None if val.lower() == 'none' else val + val = None if val.lower() == "none" else val return val def check_output(self, outputs, label, name): if label not in outputs: - raise NameError(f'\n[!] Output name "{label}" in the block ' - 'function does not match the outputs list ' - f'{outputs} in Node Block "{name}"\n') + raise NameError( + f'\n[!] Output name "{label}" in the block ' + "function does not match the outputs list " + f'{outputs} in Node Block "{name}"\n' + ) def grab_tiered_dct(self, cfg, key_list): cfg_dct = cfg.dict() @@ -1358,17 +1512,19 @@ def grab_tiered_dct(self, cfg, key_list): try: cfg_dct = cfg_dct.get(key, {}) except KeyError: - raise Exception(f"[!] The config provided to the node block is not valid") + raise Exception( + "[!] 
The config provided to the node block is not valid" + ) return cfg_dct def connect_block(self, wf, cfg, rpool): - debug = cfg.pipeline_setup['Debugging']['verbose'] + debug = cfg.pipeline_setup["Debugging"]["verbose"] all_opts = [] for name, block_dct in self.node_blocks.items(): opts = [] - config = self.check_null(block_dct['config']) - option_key = self.check_null(block_dct['option_key']) - option_val = self.check_null(block_dct['option_val']) + config = self.check_null(block_dct["config"]) + option_key = self.check_null(block_dct["option_key"]) + option_val = self.check_null(block_dct["option_val"]) if option_key and option_val: if not isinstance(option_key, list): option_key = [option_key] @@ -1378,13 +1534,15 @@ def connect_block(self, wf, cfg, rpool): key_list = config + option_key else: key_list = option_key - if 'USER-DEFINED' in option_val: + if "USER-DEFINED" in option_val: # load custom config data into each 'opt' opts = self.grab_tiered_dct(cfg, key_list) else: for option in option_val: try: - if option in self.grab_tiered_dct(cfg, key_list): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if ( + option in self.grab_tiered_dct(cfg, key_list) + ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) except AttributeError as err: raise Exception(f"{err}\nNode Block: {name}") @@ -1395,12 +1553,14 @@ def connect_block(self, wf, cfg, rpool): elif option_key and not option_val: # enables multiple config forking entries if not isinstance(option_key[0], list): - raise Exception(f'[!] The option_key field ({option_key}) ' - f'for {name} exists but there is no ' - 'option_val.\n\nIf you are trying to ' - 'populate multiple option keys, the ' - 'option_val field must contain a list of ' - 'a list.\n') + raise Exception( + f"[!] The option_key field ({option_key}) " + f"for {name} exists but there is no " + "option_val.\n\nIf you are trying to " + "populate multiple option keys, the " + "option_val field must contain a list of " + "a list.\n" + ) for option_config in option_key: # option_config is a list of pipe config levels down to the option if config: @@ -1409,29 +1569,35 @@ def connect_block(self, wf, cfg, rpool): key_list = option_config option_val = option_config[-1] if option_val in self.grab_tiered_dct(cfg, key_list[:-1]): - opts.append(option_val) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + opts.append(option_val) + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! 
opts = [None] all_opts += opts sidecar_additions = { - 'CpacConfigHash': hashlib.sha1(json.dumps(cfg.dict(), sort_keys=True).encode('utf-8')).hexdigest(), - 'CpacConfig': cfg.dict() + "CpacConfigHash": hashlib.sha1( + json.dumps(cfg.dict(), sort_keys=True).encode("utf-8") + ).hexdigest(), + "CpacConfig": cfg.dict(), } - if cfg['pipeline_setup']['output_directory'].get('user_defined'): - sidecar_additions['UserDefined'] = cfg['pipeline_setup']['output_directory']['user_defined'] - - for name, block_dct in self.node_blocks.items(): # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. + if cfg["pipeline_setup"]["output_directory"].get("user_defined"): + sidecar_additions["UserDefined"] = cfg["pipeline_setup"][ + "output_directory" + ]["user_defined"] - switch = self.check_null(block_dct['switch']) - config = self.check_null(block_dct['config']) - option_key = self.check_null(block_dct['option_key']) - option_val = self.check_null(block_dct['option_val']) - inputs = self.check_null(block_dct['inputs']) - outputs = self.check_null(block_dct['outputs']) + for ( + name, + block_dct, + ) in self.node_blocks.items(): # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. + switch = self.check_null(block_dct["switch"]) + config = self.check_null(block_dct["config"]) + option_key = self.check_null(block_dct["option_key"]) + option_val = self.check_null(block_dct["option_val"]) + inputs = self.check_null(block_dct["inputs"]) + outputs = self.check_null(block_dct["outputs"]) - block_function = block_dct['block_function'] + block_function = block_dct["block_function"] opts = [] if option_key and option_val: @@ -1443,15 +1609,19 @@ def connect_block(self, wf, cfg, rpool): key_list = config + option_key else: key_list = option_key - if 'USER-DEFINED' in option_val: + if "USER-DEFINED" in option_val: # load custom config data into each 'opt' opts = self.grab_tiered_dct(cfg, key_list) else: for option in option_val: - if option in self.grab_tiered_dct(cfg, key_list): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if ( + option in self.grab_tiered_dct(cfg, key_list) + ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! - opts = [None] # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + opts = [ + None + ] # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples if not opts: # for node blocks where the options are split into different # block functions - opts will be empty for non-selected @@ -1465,41 +1635,45 @@ def connect_block(self, wf, cfg, rpool): try: key_list = config + switch except TypeError: - raise Exception("\n\n[!] 
Developer info: Docstring error " - f"for {name}, make sure the 'config' or " - "'switch' fields are lists.\n\n") + raise Exception( + "\n\n[!] Developer info: Docstring error " + f"for {name}, make sure the 'config' or " + "'switch' fields are lists.\n\n" + ) switch = self.grab_tiered_dct(cfg, key_list) - - else: - if isinstance(switch[0], list): - # we have multiple switches, which is designed to only work if - # config is set to "None" - switch_list = [] - for key_list in switch: - val = self.grab_tiered_dct(cfg, key_list) - if isinstance(val, list): - # fork switches - if True in val: - switch_list.append(True) - if False in val: - switch_list.append(False) - else: - switch_list.append(val) - if False in switch_list: - switch = [False] + + elif isinstance(switch[0], list): + # we have multiple switches, which is designed to only work if + # config is set to "None" + switch_list = [] + for key_list in switch: + val = self.grab_tiered_dct(cfg, key_list) + if isinstance(val, list): + # fork switches + if True in val: + switch_list.append(True) + if False in val: + switch_list.append(False) else: - switch = [True] + switch_list.append(val) + if False in switch_list: + switch = [False] else: - # if config is set to "None" - key_list = switch - switch = self.grab_tiered_dct(cfg, key_list) + switch = [True] + else: + # if config is set to "None" + key_list = switch + switch = self.grab_tiered_dct(cfg, key_list) if not isinstance(switch, list): switch = [switch] if True in switch: for pipe_idx, strat_pool in rpool.get_strats( - inputs, debug).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} - fork = False in switch # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } - for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. + inputs, debug + ).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} + fork = ( + False in switch + ) # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } + for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. # remember, you can get 'data' or 'json' from strat_pool with member functions # strat_pool has all of the JSON information of all the inputs! # so when we set_data below for the TOP-LEVEL MAIN RPOOL (not the strat_pool), we can generate new merged JSON information for each output. 
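To keep the shapes straight inside this loop: get_strats hands back one entry per strategy, keyed by the encoded provenance, and each value is a strat_pool whose rpool holds exactly one level, resource name to data/json. A plain-dict picture of one such entry, with assumed values modeled on the provenance examples in the TODO comment earlier in get_strats:

single_strat = {
    "['T1w:anat_ingress', 'desc-preproc_T1w:anatomical_init']": {
        "desc-preproc_T1w": {
            "data": ("<upstream node>", "<output field>"),
            "json": {
                "CpacProvenance": [
                    "T1w:anat_ingress",
                    "desc-preproc_T1w:anatomical_init",
                ],
            },
        },
        # merged JSON sits one level up for copy convenience,
        # exactly as set on new_strats above
        "json": {
            "subjson": {},
            "CpacProvenance": [
                "T1w:anat_ingress",
                "desc-preproc_T1w:anatomical_init",
            ],
        },
    },
}

# the node block then consumes strategies like:
for pipe_idx, strat_pool in single_strat.items():
    node, out = strat_pool["desc-preproc_T1w"]["data"]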
@@ -1518,130 +1692,155 @@ def connect_block(self, wf, cfg, rpool): strat_pool.copy_resource(input_name, interface[0]) replaced_inputs.append(interface[0]) try: - wf, outs = block_function(wf, cfg, strat_pool, - pipe_x, opt) + wf, outs = block_function(wf, cfg, strat_pool, pipe_x, opt) except IOError as e: # duplicate node logger.warning(e) continue if not outs: - if (block_function.__name__ == 'freesurfer_' - 'postproc'): - logger.warning( - WARNING_FREESURFER_OFF_WITH_DATA) - LOGTAIL['warnings'].append( - WARNING_FREESURFER_OFF_WITH_DATA) + if block_function.__name__ == "freesurfer_" "postproc": + logger.warning(WARNING_FREESURFER_OFF_WITH_DATA) + LOGTAIL["warnings"].append( + WARNING_FREESURFER_OFF_WITH_DATA + ) continue if opt and len(option_val) > 1: - node_name = f'{node_name}_{opt}' - elif opt and 'USER-DEFINED' in option_val: + node_name = f"{node_name}_{opt}" + elif opt and "USER-DEFINED" in option_val: node_name = f'{node_name}_{opt["Name"]}' if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('\n=======================') - verbose_logger.debug('Node name: %s', node_name) - prov_dct = \ - rpool.get_resource_strats_from_prov( - ast.literal_eval(pipe_idx)) + verbose_logger = getLogger("engine") + verbose_logger.debug("\n=======================") + verbose_logger.debug("Node name: %s", node_name) + prov_dct = rpool.get_resource_strats_from_prov( + ast.literal_eval(pipe_idx) + ) for key, val in prov_dct.items(): - verbose_logger.debug('-------------------') - verbose_logger.debug('Input - %s:', key) - sub_prov_dct = \ - rpool.get_resource_strats_from_prov(val) + verbose_logger.debug("-------------------") + verbose_logger.debug("Input - %s:", key) + sub_prov_dct = rpool.get_resource_strats_from_prov(val) for sub_key, sub_val in sub_prov_dct.items(): - sub_sub_dct = \ - rpool.get_resource_strats_from_prov( - sub_val) - verbose_logger.debug(' sub-input - %s:', - sub_key) - verbose_logger.debug(' prov = %s', - sub_val) + sub_sub_dct = rpool.get_resource_strats_from_prov( + sub_val + ) + verbose_logger.debug(" sub-input - %s:", sub_key) + verbose_logger.debug(" prov = %s", sub_val) verbose_logger.debug( - ' sub_sub_inputs = %s', - sub_sub_dct.keys()) + " sub_sub_inputs = %s", sub_sub_dct.keys() + ) for label, connection in outs.items(): self.check_output(outputs, label, name) - new_json_info = copy.deepcopy(strat_pool.get('json')) + new_json_info = copy.deepcopy(strat_pool.get("json")) # transfer over data-specific json info # for example, if the input data json is _bold and the output is also _bold - data_type = label.split('_')[-1] - if data_type in new_json_info['subjson']: - if 'SkullStripped' in new_json_info['subjson'][data_type]: - new_json_info['SkullStripped'] = new_json_info['subjson'][data_type]['SkullStripped'] - - # determine sources for the outputs, i.e. all input data into the node block - new_json_info['Sources'] = [x for x in strat_pool.get_entire_rpool() if x != 'json' and x not in replaced_inputs] - + data_type = label.split("_")[-1] + if data_type in new_json_info["subjson"]: + if ( + "SkullStripped" + in new_json_info["subjson"][data_type] + ): + new_json_info["SkullStripped"] = new_json_info[ + "subjson" + ][data_type]["SkullStripped"] + + # determine sources for the outputs, i.e. 
all input data into the node block + new_json_info["Sources"] = [ + x + for x in strat_pool.get_entire_rpool() + if x != "json" and x not in replaced_inputs + ] + if isinstance(outputs, dict): new_json_info.update(outputs[label]) - if 'Description' not in outputs[label]: + if "Description" not in outputs[label]: # don't propagate old Description try: - del new_json_info['Description'] + del new_json_info["Description"] except KeyError: pass - if 'Template' in outputs[label]: - template_key = outputs[label]['Template'] - if template_key in new_json_info['Sources']: + if "Template" in outputs[label]: + template_key = outputs[label]["Template"] + if template_key in new_json_info["Sources"]: # only if the pipeline config template key is entered as the 'Template' field # otherwise, skip this and take in the literal 'Template' string try: - new_json_info['Template'] = new_json_info['subjson'][template_key]['Description'] + new_json_info["Template"] = new_json_info[ + "subjson" + ][template_key]["Description"] except KeyError: pass try: - new_json_info['Resolution'] = new_json_info['subjson'][template_key]['Resolution'] + new_json_info["Resolution"] = new_json_info[ + "subjson" + ][template_key]["Resolution"] except KeyError: pass else: # don't propagate old Description try: - del new_json_info['Description'] + del new_json_info["Description"] except KeyError: pass - if 'Description' in new_json_info: - new_json_info['Description'] = ' '.join(new_json_info['Description'].split()) + if "Description" in new_json_info: + new_json_info["Description"] = " ".join( + new_json_info["Description"].split() + ) for sidecar_key, sidecar_value in sidecar_additions.items(): if sidecar_key not in new_json_info: new_json_info[sidecar_key] = sidecar_value try: - del new_json_info['subjson'] + del new_json_info["subjson"] except KeyError: pass if fork or len(opts) > 1 or len(all_opts) > 1: - if 'CpacVariant' not in new_json_info: - new_json_info['CpacVariant'] = {} + if "CpacVariant" not in new_json_info: + new_json_info["CpacVariant"] = {} raw_label = rpool.get_raw_label(label) - if raw_label not in new_json_info['CpacVariant']: - new_json_info['CpacVariant'][raw_label] = [] - new_json_info['CpacVariant'][raw_label].append(node_name) - - rpool.set_data(label, - connection[0], - connection[1], - new_json_info, - pipe_idx, node_name, fork) + if raw_label not in new_json_info["CpacVariant"]: + new_json_info["CpacVariant"][raw_label] = [] + new_json_info["CpacVariant"][raw_label].append( + node_name + ) + + rpool.set_data( + label, + connection[0], + connection[1], + new_json_info, + pipe_idx, + node_name, + fork, + ) wf, post_labels = rpool.post_process( - wf, label, connection, new_json_info, pipe_idx, - pipe_x, outs) + wf, + label, + connection, + new_json_info, + pipe_idx, + pipe_x, + outs, + ) if rpool.func_reg: for postlabel in post_labels: connection = (postlabel[1], postlabel[2]) - wf = rpool.derivative_xfm(wf, postlabel[0], - connection, - new_json_info, - pipe_idx, - pipe_x) + wf = rpool.derivative_xfm( + wf, + postlabel[0], + connection, + new_json_info, + pipe_idx, + pipe_x, + ) return wf @@ -1689,211 +1888,224 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): """ for block in node_blocks: - #new_pool = copy.deepcopy(strat_pool) + # new_pool = copy.deepcopy(strat_pool) for in_resource, val in interface.items(): if isinstance(val, tuple): - strat_pool.set_data(in_resource, val[0], val[1], {}, "", "", - fork=True)# - if 'sub_num' not in strat_pool.get_pool_info(): - 
strat_pool.set_pool_info({'sub_num': 0}) - sub_num = strat_pool.get_pool_info()['sub_num'] - - wf, outputs = block(wf, cfg, strat_pool, f'{pipe_num}-{sub_num}', opt)# + strat_pool.set_data( + in_resource, val[0], val[1], {}, "", "", fork=True + ) # + if "sub_num" not in strat_pool.get_pool_info(): + strat_pool.set_pool_info({"sub_num": 0}) + sub_num = strat_pool.get_pool_info()["sub_num"] + + wf, outputs = block(wf, cfg, strat_pool, f"{pipe_num}-{sub_num}", opt) # for out, val in outputs.items(): if out in interface and isinstance(interface[out], str): - strat_pool.set_data(interface[out], outputs[out][0], outputs[out][1], - {}, "", "") + strat_pool.set_data( + interface[out], outputs[out][0], outputs[out][1], {}, "", "" + ) else: - strat_pool.set_data(out, outputs[out][0], outputs[out][1], - {}, "", "") + strat_pool.set_data(out, outputs[out][0], outputs[out][1], {}, "", "") sub_num += 1 - strat_pool.set_pool_info({'sub_num': sub_num}) + strat_pool.set_pool_info({"sub_num": sub_num}) return (wf, strat_pool) -def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, - ses_id): - if 'anat' not in data_paths: - print('No anatomical data present.') +def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): + if "anat" not in data_paths: + print("No anatomical data present.") return rpool - if 'creds_path' not in data_paths: - data_paths['creds_path'] = None + if "creds_path" not in data_paths: + data_paths["creds_path"] = None - anat_flow = create_anat_datasource(f'anat_T1w_gather_{part_id}_{ses_id}') + anat_flow = create_anat_datasource(f"anat_T1w_gather_{part_id}_{ses_id}") anat = {} - if type(data_paths['anat']) is str: - anat['T1']=data_paths['anat'] - elif 'T1w' in data_paths['anat']: - anat['T1']=data_paths['anat']['T1w'] + if type(data_paths["anat"]) is str: + anat["T1"] = data_paths["anat"] + elif "T1w" in data_paths["anat"]: + anat["T1"] = data_paths["anat"]["T1w"] - if 'T1' in anat: + if "T1" in anat: anat_flow.inputs.inputnode.set( subject=part_id, - anat=anat['T1'], - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path'], - img_type='anat' + anat=anat["T1"], + creds_path=data_paths["creds_path"], + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + img_type="anat", ) - rpool.set_data('T1w', anat_flow, 'outputspec.anat', {}, - "", "anat_ingress") - - if 'T2w' in data_paths['anat']: - anat_flow_T2 = create_anat_datasource(f'anat_T2w_gather_{part_id}_{ses_id}') + rpool.set_data("T1w", anat_flow, "outputspec.anat", {}, "", "anat_ingress") + + if "T2w" in data_paths["anat"]: + anat_flow_T2 = create_anat_datasource(f"anat_T2w_gather_{part_id}_{ses_id}") anat_flow_T2.inputs.inputnode.set( subject=part_id, - anat=data_paths['anat']['T2w'], - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path'], - img_type='anat' + anat=data_paths["anat"]["T2w"], + creds_path=data_paths["creds_path"], + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + img_type="anat", + ) + rpool.set_data("T2w", anat_flow_T2, "outputspec.anat", {}, "", "anat_ingress") + + if cfg.surface_analysis["freesurfer"]["ingress_reconall"]: + rpool = ingress_freesurfer( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id ) - rpool.set_data('T2w', anat_flow_T2, 'outputspec.anat', {}, - "", "anat_ingress") - if cfg.surface_analysis['freesurfer']['ingress_reconall']: - rpool = ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, - ses_id) - return rpool -def ingress_freesurfer(wf, 
rpool, cfg, data_paths, unique_id, part_id, - ses_id): - - try: - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + +def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): + try: + fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], part_id) except KeyError: - print('No FreeSurfer data present.') + print("No FreeSurfer data present.") return rpool - - #fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + + # fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) if not os.path.exists(fs_path): - if 'sub' in part_id: - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) + if "sub" in part_id: + fs_path = os.path.join( + cfg.pipeline_setup["freesurfer_dir"], part_id.replace("sub-", "") + ) else: - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], ('sub-' + part_id)) - + fs_path = os.path.join( + cfg.pipeline_setup["freesurfer_dir"], ("sub-" + part_id) + ) + # patch for flo-specific data if not os.path.exists(fs_path): - subj_ses = part_id + '-' + ses_id - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], subj_ses) + subj_ses = part_id + "-" + ses_id + fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], subj_ses) if not os.path.exists(fs_path): - print(f'No FreeSurfer data found for subject {part_id}') + print(f"No FreeSurfer data found for subject {part_id}") return rpool - + # Check for double nested subj names - if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))): + if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))): fs_path = os.path.join(fs_path, part_id) - fs_ingress = create_general_datasource('gather_freesurfer_dir') + fs_ingress = create_general_datasource("gather_freesurfer_dir") fs_ingress.inputs.inputnode.set( unique_id=unique_id, data=fs_path, - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', - {}, "", "freesurfer_config_ingress") + creds_path=data_paths["creds_path"], + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + "freesurfer-subject-dir", + fs_ingress, + "outputspec.data", + {}, + "", + "freesurfer_config_ingress", + ) recon_outs = { - 'pipeline-fs_raw-average': 'mri/rawavg.mgz', - 'pipeline-fs_subcortical-seg': 'mri/aseg.mgz', - 'pipeline-fs_brainmask': 'mri/brainmask.mgz', - 'pipeline-fs_wmparc': 'mri/wmparc.mgz', - 'pipeline-fs_T1': 'mri/T1.mgz', - 'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv', - 'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv', - 'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial', - 'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial', - 'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm', - 'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm', - 'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere', - 'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere', - 'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc', - 'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc', - 'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness', - 'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness', - 'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume', - 'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume', - 'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white', - 
'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white', - 'pipeline-fs_xfm': 'mri/transforms/talairach.lta' + "pipeline-fs_raw-average": "mri/rawavg.mgz", + "pipeline-fs_subcortical-seg": "mri/aseg.mgz", + "pipeline-fs_brainmask": "mri/brainmask.mgz", + "pipeline-fs_wmparc": "mri/wmparc.mgz", + "pipeline-fs_T1": "mri/T1.mgz", + "pipeline-fs_hemi-L_desc-surface_curv": "surf/lh.curv", + "pipeline-fs_hemi-R_desc-surface_curv": "surf/rh.curv", + "pipeline-fs_hemi-L_desc-surfaceMesh_pial": "surf/lh.pial", + "pipeline-fs_hemi-R_desc-surfaceMesh_pial": "surf/rh.pial", + "pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm": "surf/lh.smoothwm", + "pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm": "surf/rh.smoothwm", + "pipeline-fs_hemi-L_desc-surfaceMesh_sphere": "surf/lh.sphere", + "pipeline-fs_hemi-R_desc-surfaceMesh_sphere": "surf/rh.sphere", + "pipeline-fs_hemi-L_desc-surfaceMap_sulc": "surf/lh.sulc", + "pipeline-fs_hemi-R_desc-surfaceMap_sulc": "surf/rh.sulc", + "pipeline-fs_hemi-L_desc-surfaceMap_thickness": "surf/lh.thickness", + "pipeline-fs_hemi-R_desc-surfaceMap_thickness": "surf/rh.thickness", + "pipeline-fs_hemi-L_desc-surfaceMap_volume": "surf/lh.volume", + "pipeline-fs_hemi-R_desc-surfaceMap_volume": "surf/rh.volume", + "pipeline-fs_hemi-L_desc-surfaceMesh_white": "surf/lh.white", + "pipeline-fs_hemi-R_desc-surfaceMesh_white": "surf/rh.white", + "pipeline-fs_xfm": "mri/transforms/talairach.lta", } - + for key, outfile in recon_outs.items(): fullpath = os.path.join(fs_path, outfile) if os.path.exists(fullpath): - fs_ingress = create_general_datasource(f'gather_fs_{key}_dir') + fs_ingress = create_general_datasource(f"gather_fs_{key}_dir") fs_ingress.inputs.inputnode.set( unique_id=unique_id, data=fullpath, - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data(key, fs_ingress, 'outputspec.data', - {}, "", f"fs_{key}_ingress") + creds_path=data_paths["creds_path"], + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress" + ) else: - warnings.warn(str( - LookupError("\n[!] Path does not exist for " - f"{fullpath}.\n"))) - + warnings.warn( + str(LookupError("\n[!] 
Path does not exist for " f"{fullpath}.\n")) + ) + return rpool -def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, - ses_id): - func_paths_dct = data_paths['func'] +def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): + func_paths_dct = data_paths["func"] - func_wf = create_func_datasource(func_paths_dct, rpool, - f'func_ingress_{part_id}_{ses_id}') + func_wf = create_func_datasource( + func_paths_dct, rpool, f"func_ingress_{part_id}_{ses_id}" + ) func_wf.inputs.inputnode.set( subject=part_id, - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path'] + creds_path=data_paths["creds_path"], + dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - func_wf.get_node('inputnode').iterables = \ - ("scan", list(func_paths_dct.keys())) - - rpool.set_data('subject', func_wf, 'outputspec.subject', {}, "", - "func_ingress") - rpool.set_data('bold', func_wf, 'outputspec.rest', {}, "", "func_ingress") - rpool.set_data('scan', func_wf, 'outputspec.scan', {}, "", "func_ingress") - rpool.set_data('scan-params', func_wf, 'outputspec.scan_params', {}, "", - "scan_params_ingress") - + func_wf.get_node("inputnode").iterables = ("scan", list(func_paths_dct.keys())) + + rpool.set_data("subject", func_wf, "outputspec.subject", {}, "", "func_ingress") + rpool.set_data("bold", func_wf, "outputspec.rest", {}, "", "func_ingress") + rpool.set_data("scan", func_wf, "outputspec.scan", {}, "", "func_ingress") + rpool.set_data( + "scan-params", func_wf, "outputspec.scan_params", {}, "", "scan_params_ingress" + ) + # TODO: CHECK FOR PARAMETERS - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_func_metadata(wf, cfg, rpool, data_paths, part_id, - data_paths['creds_path'], ses_id) + wf, rpool, diff, blip, fmap_rp_list = ingress_func_metadata( + wf, cfg, rpool, data_paths, part_id, data_paths["creds_path"], ses_id + ) # Memoize list of local functional scans # TODO: handle S3 files # Skip S3 files for now local_func_scans = [ - func_paths_dct[scan]['scan'] for scan in func_paths_dct.keys() if not - func_paths_dct[scan]['scan'].startswith('s3://')] + func_paths_dct[scan]["scan"] + for scan in func_paths_dct.keys() + if not func_paths_dct[scan]["scan"].startswith("s3://") + ] if local_func_scans: # pylint: disable=protected-access wf._local_func_scans = local_func_scans - if cfg.pipeline_setup['Debugging']['verbose']: - verbose_logger = getLogger('engine') - verbose_logger.debug('local_func_scans: %s', local_func_scans) + if cfg.pipeline_setup["Debugging"]["verbose"]: + verbose_logger = getLogger("engine") + verbose_logger.debug("local_func_scans: %s", local_func_scans) del local_func_scans return (wf, rpool, diff, blip, fmap_rp_list) -def ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, ses_id, creds_path=None): - - dir_path = data_paths['derivatives_dir'] +def ingress_output_dir( + wf, cfg, rpool, unique_id, data_paths, part_id, ses_id, creds_path=None +): + dir_path = data_paths["derivatives_dir"] print(f"\nPulling outputs from {dir_path}.\n") - anat = os.path.join(dir_path, 'anat') - func = os.path.join(dir_path, 'func') + anat = os.path.join(dir_path, "anat") + func = os.path.join(dir_path, "func") - exts = ['.nii', '.gz', '.mat', '.1D', '.txt', '.csv', '.rms', '.tsv'] + exts = [".nii", ".gz", ".mat", ".1D", ".txt", ".csv", ".rms", ".tsv"] outdir_anat = [] outdir_func = [] @@ -1906,89 +2118,103 @@ def ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, ses_id, c for ext in exts: if ext in 
filename: if subdir == anat: - outdir_anat.append(os.path.join(subdir, - filename)) + outdir_anat.append(os.path.join(subdir, filename)) else: - outdir_func.append(os.path.join(subdir, - filename)) + outdir_func.append(os.path.join(subdir, filename)) - # Add derivatives directory to rpool - ingress = create_general_datasource(f'gather_derivatives_dir') + # Add derivatives directory to rpool + ingress = create_general_datasource("gather_derivatives_dir") ingress.inputs.inputnode.set( - unique_id=unique_id, - data=dir_path, - creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) - rpool.set_data("derivatives-dir", ingress, 'outputspec.data', - {}, "", "outdir_config_ingress") + unique_id=unique_id, + data=dir_path, + creds_path=creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + "derivatives-dir", ingress, "outputspec.data", {}, "", "outdir_config_ingress" + ) for subdir in [outdir_anat, outdir_func]: for filepath in subdir: filename = str(filepath) for ext in exts: - filename = filename.split("/")[-1].replace(ext, '') + filename = filename.split("/")[-1].replace(ext, "") - data_label = filename.split(unique_id)[1].lstrip('_') + data_label = filename.split(unique_id)[1].lstrip("_") if len(filename) == len(data_label): - raise Exception('\n\n[!] Possibly wrong participant or ' - 'session in this directory?\n\n' - f'Filepath: {filepath}\n\n') + raise Exception( + "\n\n[!] Possibly wrong participant or " + "session in this directory?\n\n" + f"Filepath: {filepath}\n\n" + ) - bidstag = '' - for tag in data_label.split('_'): - for prefix in ['task-', 'run-', 'acq-', 'rec']: + bidstag = "" + for tag in data_label.split("_"): + for prefix in ["task-", "run-", "acq-", "rec"]: if tag.startswith(prefix): - bidstag += f'{tag}_' - data_label = data_label.replace(f'{tag}_', '') + bidstag += f"{tag}_" + data_label = data_label.replace(f"{tag}_", "") data_label, json = strip_template(data_label, dir_path, filename) - rpool, json_info, pipe_idx, node_name, data_label = \ - json_outdir_ingress(rpool, filepath, \ - exts, data_label, json) + rpool, json_info, pipe_idx, node_name, data_label = json_outdir_ingress( + rpool, filepath, exts, data_label, json + ) - if ('template' in data_label and not json_info['Template'] == \ - cfg.pipeline_setup['outdir_ingress']['Template']): + if ( + "template" in data_label + and not json_info["Template"] + == cfg.pipeline_setup["outdir_ingress"]["Template"] + ): continue # Rename confounds to avoid confusion in nuisance regression - if data_label.endswith('desc-confounds_timeseries'): - data_label = 'pipeline-ingress_desc-confounds_timeseries' + if data_label.endswith("desc-confounds_timeseries"): + data_label = "pipeline-ingress_desc-confounds_timeseries" if len(bidstag) > 1: # Remove tail symbol bidstag = bidstag[:-1] - if bidstag.startswith('task-'): - bidstag = bidstag.replace('task-', '') + if bidstag.startswith("task-"): + bidstag = bidstag.replace("task-", "") # Rename bold mask for CPAC naming convention # and to avoid collision with anat brain mask - if data_label.endswith('desc-brain_mask') and filepath in outdir_func: - data_label = data_label.replace('brain_mask', 'bold_mask') + if data_label.endswith("desc-brain_mask") and filepath in outdir_func: + data_label = data_label.replace("brain_mask", "bold_mask") try: pipe_x = rpool.get_pipe_number(pipe_idx) except ValueError: pipe_x = len(rpool.pipe_list) if filepath in outdir_anat: - ingress = 
create_general_datasource(f'gather_anat_outdir_{str(data_label)}_{pipe_x}') + ingress = create_general_datasource( + f"gather_anat_outdir_{data_label!s}_{pipe_x}" + ) ingress.inputs.inputnode.set( unique_id=unique_id, data=filepath, creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + data_label, + ingress, + "outputspec.data", + json_info, + pipe_idx, + node_name, + f"outdir_{data_label}_ingress", + inject=True, ) - rpool.set_data(data_label, ingress, 'outputspec.data', json_info, - pipe_idx, node_name, f"outdir_{data_label}_ingress", inject=True) else: - if data_label.endswith('desc-preproc_bold'): + if data_label.endswith("desc-preproc_bold"): func_key = data_label func_dict[bidstag] = {} - func_dict[bidstag]['scan'] = str(filepath) - func_dict[bidstag]['scan_parameters'] = json_info - func_dict[bidstag]['pipe_idx'] = pipe_idx - if data_label.endswith('desc-brain_mask'): - data_label = data_label.replace('brain_mask', 'bold_mask') + func_dict[bidstag]["scan"] = str(filepath) + func_dict[bidstag]["scan_parameters"] = json_info + func_dict[bidstag]["pipe_idx"] = pipe_idx + if data_label.endswith("desc-brain_mask"): + data_label = data_label.replace("brain_mask", "bold_mask") try: func_paths[data_label].append(filepath) except: @@ -1996,166 +2222,193 @@ def ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, ses_id, c func_paths[data_label].append(filepath) if func_dict: - wf, rpool = func_outdir_ingress(wf, cfg, func_dict, rpool, unique_id, \ - creds_path, part_id, func_key, func_paths) + wf, rpool = func_outdir_ingress( + wf, + cfg, + func_dict, + rpool, + unique_id, + creds_path, + part_id, + func_key, + func_paths, + ) - if cfg.surface_analysis['freesurfer']['ingress_reconall']: - rpool = ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, - ses_id) + if cfg.surface_analysis["freesurfer"]["ingress_reconall"]: + rpool = ingress_freesurfer( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) return wf, rpool + def json_outdir_ingress(rpool, filepath, exts, data_label, json): - desc_val = None - for tag in data_label.split('_'): - if 'desc-' in tag: + for tag in data_label.split("_"): + if "desc-" in tag: desc_val = tag break jsonpath = str(filepath) for ext in exts: - jsonpath = jsonpath.replace(ext, '') + jsonpath = jsonpath.replace(ext, "") jsonpath = f"{jsonpath}.json" if not os.path.exists(jsonpath): - print(f'\n\n[!] No JSON found for file {filepath}.\nCreating ' - f'{jsonpath}..\n\n') + print( + f"\n\n[!] No JSON found for file {filepath}.\nCreating " f"{jsonpath}..\n\n" + ) json_info = { - 'Description': 'This data was generated elsewhere and ' - 'supplied by the user into this C-PAC run\'s ' - 'output directory. This JSON file was ' - 'automatically generated by C-PAC because a ' - 'JSON file was not supplied with the data.' + "Description": "This data was generated elsewhere and " + "supplied by the user into this C-PAC run's " + "output directory. This JSON file was " + "automatically generated by C-PAC because a " + "JSON file was not supplied with the data." } json_info = {**json_info, **json} write_output_json(json_info, jsonpath) else: json_info = read_json(jsonpath) json_info = {**json_info, **json} - if 'CpacProvenance' in json_info: + if "CpacProvenance" in json_info: if desc_val: # it's a C-PAC output, let's check for pipe_idx/strat integer # suffixes in the desc- entries. 
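# e.g. a variant written out as 'desc-preproc-2' carries the strategy
# index in its suffix; stripping the trailing digits (and hyphen) below
# recovers the generic 'desc-preproc', while the true pipe_idx is
# rebuilt from the CpacProvenance entry further down.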
only_desc = str(desc_val) - + if only_desc[-1].isdigit(): for idx in range(0, 3): # let's stop at 3, please don't run >999 strategies okay? if only_desc[-1].isdigit(): only_desc = only_desc[:-1] - - if only_desc[-1] == '-': - only_desc = only_desc.rstrip('-') - else: - raise Exception('\n[!] Something went wrong with either ' - 'reading in the output directory or when ' - 'it was written out previously.\n\nGive ' - 'this to your friendly local C-PAC ' - f'developer:\n\n{str(data_label)}\n') - # remove the integer at the end of the desc-* variant, we will + if only_desc[-1] == "-": + only_desc = only_desc.rstrip("-") + else: + raise Exception( + "\n[!] Something went wrong with either " + "reading in the output directory or when " + "it was written out previously.\n\nGive " + "this to your friendly local C-PAC " + f"developer:\n\n{data_label!s}\n" + ) + + # remove the integer at the end of the desc-* variant, we will # get the unique pipe_idx from the CpacProvenance below data_label = data_label.replace(desc_val, only_desc) # preserve cpac provenance/pipe_idx - pipe_idx = rpool.generate_prov_string(json_info['CpacProvenance']) + pipe_idx = rpool.generate_prov_string(json_info["CpacProvenance"]) node_name = "" - + else: - json_info['CpacProvenance'] = [f'{data_label}:Non-C-PAC Origin: {filepath}'] - if not 'Description' in json_info: - json_info['Description'] = 'This data was generated elsewhere and ' \ - 'supplied by the user into this C-PAC run\'s '\ - 'output directory. This JSON file was '\ - 'automatically generated by C-PAC because a '\ - 'JSON file was not supplied with the data.' - pipe_idx = rpool.generate_prov_string(json_info['CpacProvenance']) + json_info["CpacProvenance"] = [f"{data_label}:Non-C-PAC Origin: {filepath}"] + if "Description" not in json_info: + json_info["Description"] = ( + "This data was generated elsewhere and " + "supplied by the user into this C-PAC run's " + "output directory. This JSON file was " + "automatically generated by C-PAC because a " + "JSON file was not supplied with the data." 
+ ) + pipe_idx = rpool.generate_prov_string(json_info["CpacProvenance"]) node_name = f"{data_label}_ingress" return rpool, json_info, pipe_idx, node_name, data_label -def func_outdir_ingress(wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, \ - func_paths): + +def func_outdir_ingress( + wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, func_paths +): pipe_x = len(rpool.pipe_list) - exts = ['.nii', '.gz', '.mat', '.1D', '.txt', '.csv', '.rms', '.tsv'] - ingress = create_func_datasource(func_dict, rpool, f'gather_func_outdir_{key}_{pipe_x}') + exts = [".nii", ".gz", ".mat", ".1D", ".txt", ".csv", ".rms", ".tsv"] + ingress = create_func_datasource( + func_dict, rpool, f"gather_func_outdir_{key}_{pipe_x}" + ) ingress.inputs.inputnode.set( subject=unique_id, creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] + dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - rpool.set_data('subject', ingress, 'outputspec.subject', {}, "", - "func_ingress") - ingress.get_node('inputnode').iterables = \ - ("scan", list(func_dict.keys())) - rpool.set_data(key, ingress, 'outputspec.rest', {}, "", - "func_ingress") - - rpool.set_data('scan', ingress, 'outputspec.scan', {}, "", 'func_ingress') - rpool.set_data('scan-params', ingress, 'outputspec.scan_params', {}, "", - "scan_params_ingress") - wf, rpool, diff, blip, fmap_rp_list = ingress_func_metadata(wf, cfg, \ - rpool, func_dict, part_id, creds_path, key) - + rpool.set_data("subject", ingress, "outputspec.subject", {}, "", "func_ingress") + ingress.get_node("inputnode").iterables = ("scan", list(func_dict.keys())) + rpool.set_data(key, ingress, "outputspec.rest", {}, "", "func_ingress") + + rpool.set_data("scan", ingress, "outputspec.scan", {}, "", "func_ingress") + rpool.set_data( + "scan-params", ingress, "outputspec.scan_params", {}, "", "scan_params_ingress" + ) + wf, rpool, diff, blip, fmap_rp_list = ingress_func_metadata( + wf, cfg, rpool, func_dict, part_id, creds_path, key + ) + # Have to do it this weird way to save the parsed BIDS tag & filepath - mask_paths_key = 'desc-bold_mask' if 'desc-bold_mask' in func_paths else \ - 'space-template_desc-bold_mask' - ts_paths_key = 'pipeline-ingress_desc-confounds_timeseries' + mask_paths_key = ( + "desc-bold_mask" + if "desc-bold_mask" in func_paths + else "space-template_desc-bold_mask" + ) + ts_paths_key = "pipeline-ingress_desc-confounds_timeseries" # Connect func data with appropriate scan name - iterables = pe.Node(Function(input_names=['scan', - 'mask_paths', - 'ts_paths'], - output_names=['out_scan', - 'mask', - 'confounds'], - function=set_iterables), - name=f'set_iterables_{pipe_x}') + iterables = pe.Node( + Function( + input_names=["scan", "mask_paths", "ts_paths"], + output_names=["out_scan", "mask", "confounds"], + function=set_iterables, + ), + name=f"set_iterables_{pipe_x}", + ) iterables.inputs.mask_paths = func_paths[mask_paths_key] iterables.inputs.ts_paths = func_paths[ts_paths_key] - wf.connect(ingress, 'outputspec.scan', iterables, 'scan') + wf.connect(ingress, "outputspec.scan", iterables, "scan") for key in func_paths: if key == mask_paths_key or key == ts_paths_key: - ingress_func = create_general_datasource(f'ingress_func_data_{key}') + ingress_func = create_general_datasource(f"ingress_func_data_{key}") ingress_func.inputs.inputnode.set( unique_id=unique_id, creds_path=creds_path, -
dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + wf.connect(iterables, "out_scan", ingress_func, "inputnode.scan") if key == mask_paths_key: - wf.connect(iterables, 'mask', ingress_func, 'inputnode.data') - rpool.set_data(key, ingress_func, 'inputnode.data', {}, "", f"outdir_{key}_ingress") + wf.connect(iterables, "mask", ingress_func, "inputnode.data") + rpool.set_data( + key, ingress_func, "inputnode.data", {}, "", f"outdir_{key}_ingress" + ) elif key == ts_paths_key: - wf.connect(iterables, 'confounds', ingress_func, 'inputnode.data') - rpool.set_data(key, ingress_func, 'inputnode.data', {}, "", f"outdir_{key}_ingress") + wf.connect(iterables, "confounds", ingress_func, "inputnode.data") + rpool.set_data( + key, ingress_func, "inputnode.data", {}, "", f"outdir_{key}_ingress" + ) return wf, rpool + def set_iterables(scan, mask_paths=None, ts_paths=None): - # match scan with filepath to get filepath mask_path = [path for path in mask_paths if scan in path] ts_path = [path for path in ts_paths if scan in path] - return (scan, mask_path[0], ts_path[0]) + return (scan, mask_path[0], ts_path[0]) + def strip_template(data_label, dir_path, filename): - json = {} - # rename to template - for prefix in ['space-', 'from-', 'to-']: - for bidstag in data_label.split('_'): + # rename to template + for prefix in ["space-", "from-", "to-"]: + for bidstag in data_label.split("_"): if bidstag.startswith(prefix): - template_key, template_val = bidstag.split('-') + template_key, template_val = bidstag.split("-") template_name, _template_desc = lookup_identifier(template_val) if template_name: - json['Template'] = template_val - data_label = data_label.replace(template_val, 'template') - elif bidstag.startswith('res-'): - res_key, res_val = bidstag.split('-') - json['Resolution'] = res_val - data_label = data_label.replace(bidstag, '') - if data_label.find('__'): data_label = data_label.replace('__', '_') + json["Template"] = template_val + data_label = data_label.replace(template_val, "template") + elif bidstag.startswith("res-"): + res_key, res_val = bidstag.split("-") + json["Resolution"] = res_val + data_label = data_label.replace(bidstag, "") + if data_label.find("__"): + data_label = data_label.replace("__", "_") return data_label, json @@ -2163,18 +2416,16 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well - import pkg_resources as p import pandas as pd - import ast + import pkg_resources as p - template_csv = p.resource_filename('CPAC', 'resources/cpac_templates.csv') + template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) - + for row in template_df.itertuples(): - key = row.Key val = row.Pipeline_Config_Entry - val = cfg.get_nested(cfg, [x.lstrip() for x in val.split(',')]) + val = cfg.get_nested(cfg, [x.lstrip() for x in val.split(",")]) resolution = row.Intended_Resolution_Config_Entry desc = row.Description @@ -2182,72 +2433,95 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): continue if resolution: - res_keys = [x.lstrip() for x in resolution.split(',')] + res_keys = [x.lstrip() for x in resolution.split(",")] tag = res_keys[-1] - json_info = {} - - if '$FSLDIR' in val: - val = val.replace('$FSLDIR', cfg.pipeline_setup[ - 'system_config']['FSLDIR']) - if '$priors_path' in val: - priors_path = 
cfg.segmentation['tissue_segmentation']['FSL-FAST']['use_priors']['priors_path'] or '' - if '$FSLDIR' in priors_path: - priors_path = priors_path.replace('$FSLDIR', cfg.pipeline_setup['system_config']['FSLDIR']) - val = val.replace('$priors_path', priors_path) - if '${resolution_for_anat}' in val: - val = val.replace('${resolution_for_anat}', cfg.registration_workflows['anatomical_registration']['resolution_for_anat']) - if '${func_resolution}' in val: - val = val.replace('${func_resolution}', cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'output_resolution'][tag]) + json_info = {} + + if "$FSLDIR" in val: + val = val.replace("$FSLDIR", cfg.pipeline_setup["system_config"]["FSLDIR"]) + if "$priors_path" in val: + priors_path = ( + cfg.segmentation["tissue_segmentation"]["FSL-FAST"]["use_priors"][ + "priors_path" + ] + or "" + ) + if "$FSLDIR" in priors_path: + priors_path = priors_path.replace( + "$FSLDIR", cfg.pipeline_setup["system_config"]["FSLDIR"] + ) + val = val.replace("$priors_path", priors_path) + if "${resolution_for_anat}" in val: + val = val.replace( + "${resolution_for_anat}", + cfg.registration_workflows["anatomical_registration"][ + "resolution_for_anat" + ], + ) + if "${func_resolution}" in val: + val = val.replace( + "${func_resolution}", + cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["output_resolution"][tag], + ) if desc: template_name, _template_desc = lookup_identifier(val) if template_name: desc = f"{template_name} - {desc}" - json_info['Description'] = f"{desc} - {val}" + json_info["Description"] = f"{desc} - {val}" if resolution: resolution = cfg.get_nested(cfg, res_keys) - json_info['Resolution'] = resolution - - resampled_template = pe.Node(Function(input_names=['resolution', - 'template', - 'template_name', - 'tag'], - output_names=['resampled_template'], - function=resolve_resolution, - as_module=True), - name='resampled_' + key) + json_info["Resolution"] = resolution + + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + key, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = val resampled_template.inputs.template_name = key resampled_template.inputs.tag = tag - + # the set_data below is set up a little differently, because we are # injecting and also over-writing already-existing entries # other alternative would have been to ingress into the # resampled_template node from the already existing entries, but we # didn't do that here - rpool.set_data(key, - resampled_template, - 'resampled_template', - json_info, "", - "template_resample") #, inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! 
because you're not connecting this the regular way, you have to do it manually - - else: - if val: - config_ingress = create_general_datasource(f'gather_{key}') - config_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=val, - creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) - rpool.set_data(key, config_ingress, 'outputspec.data', - json_info, "", f"{key}_config_ingress") + rpool.set_data( + key, + resampled_template, + "resampled_template", + json_info, + "", + "template_resample", + ) # , inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! because you're not connecting this the regular way, you have to do it manually + + elif val: + config_ingress = create_general_datasource(f"gather_{key}") + config_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=val, + creds_path=creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + key, + config_ingress, + "outputspec.data", + json_info, + "", + f"{key}_config_ingress", + ) # templates, resampling from config - ''' + """ template_keys = [ ("anat", ["network_centrality", "template_specification_file"]), ("anat", ["nuisance_corrections", "2-nuisance_regression", @@ -2330,13 +2604,13 @@ def _set_nested(attr, keys): map_node=True ) cfg.set_nested(cfg, key, node) - ''' + """ return rpool def initiate_rpool(wf, cfg, data_paths=None, part_id=None): - ''' + """ data_paths format: {'anat': { @@ -2355,21 +2629,20 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): 'subject_id': 'sub-01', 'unique_id': 'ses-1', 'derivatives_dir': '{derivatives_dir path}'} - ''' - + """ # TODO: refactor further, integrate with the ingress_data functionality # TODO: used for BIDS-Derivatives (below), and possible refactoring of # TODO: the raw data config to use 'T1w' label instead of 'anat' etc. 
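# For example (illustrative values only), a data_paths dict in the
# format documented above might look like:
#     {'anat': {'T1w': '/bids/sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz'},
#      'creds_path': None,
#      'func': {'rest_run-1': {'scan': '<path to BOLD>',
#                              'scan_parameters': {...}}},
#      'site': 'site-1', 'subject_id': 'sub-01', 'unique_id': 'ses-1',
#      'derivatives_dir': '/outputs/sub-01_ses-1'}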
if data_paths: - part_id = data_paths['subject_id'] - ses_id = data_paths['unique_id'] - if 'creds_path' not in data_paths: + part_id = data_paths["subject_id"] + ses_id = data_paths["unique_id"] + if "creds_path" not in data_paths: creds_path = None else: - creds_path = data_paths['creds_path'] - unique_id = f'{part_id}_{ses_id}' - + creds_path = data_paths["creds_path"] + unique_id = f"{part_id}_{ses_id}" + elif part_id: unique_id = part_id creds_path = None @@ -2378,18 +2651,29 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): if data_paths: # ingress outdir - try: - if data_paths['derivatives_dir'] and cfg.pipeline_setup['outdir_ingress']['run']: - wf, rpool = \ - ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, \ - ses_id, creds_path=None) + try: + if ( + data_paths["derivatives_dir"] + and cfg.pipeline_setup["outdir_ingress"]["run"] + ): + wf, rpool = ingress_output_dir( + wf, + cfg, + rpool, + unique_id, + data_paths, + part_id, + ses_id, + creds_path=None, + ) except: - rpool = ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, - part_id, ses_id) - if 'func' in data_paths: - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, - part_id, ses_id) + rpool = ingress_raw_anat_data( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) + if "func" in data_paths: + wf, rpool, diff, blip, fmap_rp_list = ingress_raw_func_data( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) # grab any file paths from the pipeline config YAML rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) @@ -2401,45 +2685,42 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): def run_node_blocks(blocks, data_paths, cfg=None): import os + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.engine import NodeBlock if not cfg: cfg = { - 'pipeline_setup': { - 'working_directory': { - 'path': os.getcwd() - }, - 'log_directory': { - 'path': os.getcwd() - } + "pipeline_setup": { + "working_directory": {"path": os.getcwd()}, + "log_directory": {"path": os.getcwd()}, } } # TODO: WE HAVE TO PARSE OVER UNIQUE ID'S!!! _, rpool = initiate_rpool(cfg, data_paths) - wf = pe.Workflow(name='node_blocks') - wf.base_dir = cfg.pipeline_setup['working_directory']['path'] - wf.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': cfg.pipeline_setup['log_directory']['path'] + wf = pe.Workflow(name="node_blocks") + wf.base_dir = cfg.pipeline_setup["working_directory"]["path"] + wf.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": cfg.pipeline_setup["log_directory"]["path"], } run_blocks = [] - if rpool.check_rpool('desc-preproc_T1w'): + if rpool.check_rpool("desc-preproc_T1w"): print("Preprocessed T1w found, skipping anatomical preprocessing.") else: run_blocks += blocks[0] - if rpool.check_rpool('desc-preproc_bold'): + if rpool.check_rpool("desc-preproc_bold"): print("Preprocessed BOLD found, skipping functional preprocessing.") else: run_blocks += blocks[1] for block in run_blocks: - wf = NodeBlock(block, debug=cfg['pipeline_setup', 'Debugging', - 'verbose']).connect_block( - wf, cfg, rpool) + wf = NodeBlock( + block, debug=cfg["pipeline_setup", "Debugging", "verbose"] + ).connect_block(wf, cfg, rpool) rpool.gather_pipes(wf, cfg) wf.run() @@ -2477,6 +2758,7 @@ class NodeData: ... print(str(lookup_error).strip().split('\n')[0].strip()) [!] 
C-PAC says: None of the listed resources are in the resource pool: """ + # pylint: disable=too-few-public-methods def __init__(self, strat_pool=None, resource=None, **kwargs): self.node = NotImplemented diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 9f903f1cad..eb83a9107f 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -16,35 +16,55 @@ # License along with C-PAC. If not, see . """Validation schema for C-PAC pipeline configurations""" + # pylint: disable=too-many-lines -import re from itertools import chain, permutations +import re +from subprocess import CalledProcessError + import numpy as np from pathvalidate import sanitize_filename -from subprocess import CalledProcessError -from voluptuous import All, ALLOW_EXTRA, Any, BooleanInvalid, Capitalize, \ - Coerce, CoerceInvalid, ExclusiveInvalid, In, Length, \ - LengthInvalid, Lower, Match, Maybe, MultipleInvalid, \ - Optional, Range, Required, Schema, Title +from voluptuous import ( + All, + ALLOW_EXTRA, + Any, + BooleanInvalid, + Capitalize, + Coerce, + CoerceInvalid, + ExclusiveInvalid, + In, + Length, + LengthInvalid, + Lower, + Match, + Maybe, + MultipleInvalid, + Optional, + Range, + Required, + Schema, + Title, +) + from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.utils import YAML_BOOLS # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits -SCIENTIFIC_NOTATION_STR_REGEX = r'^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$' +SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$" # (1 or more digits, optional decimal, 0 or more lowercase characters (units)) # ('x', # 1 or more digits, optional decimal, 0 or more lowercase characters (units) # ) 0 or more times -RESOLUTION_REGEX = r'^[0-9]+(\.[0-9]*){0,1}[a-z]*' \ - r'(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$' +RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*" r"(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" Number = Any(float, int, All(str, Match(SCIENTIFIC_NOTATION_STR_REGEX))) def str_to_bool1_1(x): # pylint: disable=invalid-name - '''Convert strings to Booleans for YAML1.1 syntax + """Convert strings to Booleans for YAML1.1 syntax Ref https://yaml.org/type/bool.html @@ -55,7 +75,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name Returns ------- bool - ''' + """ if isinstance(x, str): try: x = float(x) @@ -63,201 +83,240 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name return False except ValueError: pass - x = (True if str(x).lower() in YAML_BOOLS[True] else - False if str(x).lower() in YAML_BOOLS[False] else x) + x = ( + True + if str(x).lower() in YAML_BOOLS[True] + else False + if str(x).lower() in YAML_BOOLS[False] + else x + ) if not isinstance(x, (bool, int)): - raise BooleanInvalid('Type boolean value was expected, type ' - f'{getattr(type(x), "__name__", str(type(x)))} ' - f'value\n\n{x}\n\nwas provided') + raise BooleanInvalid( + 'Type boolean value was expected, type ' + f'{getattr(type(x), "__name__", str(type(x)))} ' + f'value\n\n{x}\n\nwas provided' + ) return bool(x) bool1_1 = All(str_to_bool1_1, bool) forkable = All(Coerce(ListFromItem), [bool1_1], Length(max=2)) valid_options = { - 'acpc': { - 'target': ['brain', 'whole-head'] - }, - 'brain_extraction': { - 'using': ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', - 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', - 'FreeSurfer-ABCD', 'FreeSurfer-Brainmask'] + "acpc": {"target": ["brain", "whole-head"]}, + "brain_extraction": { + "using": [ + "3dSkullStrip", + "BET", + "UNet", + 
"niworkflows-ants", + "FreeSurfer-BET-Tight", + "FreeSurfer-BET-Loose", + "FreeSurfer-ABCD", + "FreeSurfer-Brainmask", + ] }, - 'centrality': { - 'method_options': ['degree_centrality', 'eigenvector_centrality', - 'local_functional_connectivity_density'], - 'threshold_options': ['Significance threshold', 'Sparsity threshold', - 'Correlation threshold'], - 'weight_options': ['Binarized', 'Weighted'] + "centrality": { + "method_options": [ + "degree_centrality", + "eigenvector_centrality", + "local_functional_connectivity_density", + ], + "threshold_options": [ + "Significance threshold", + "Sparsity threshold", + "Correlation threshold", + ], + "weight_options": ["Binarized", "Weighted"], }, - 'motion_correction': ['3dvolreg', 'mcflirt'], - 'sca': { - 'roi_paths': ['Avg', 'DualReg', 'MultReg'], + "motion_correction": ["3dvolreg", "mcflirt"], + "sca": { + "roi_paths": ["Avg", "DualReg", "MultReg"], }, - 'segmentation': { - 'using': ['FSL-FAST', 'ANTs_Prior_Based', 'Template_Based'], - 'template': ['EPI_Template', 'T1_Template'], + "segmentation": { + "using": ["FSL-FAST", "ANTs_Prior_Based", "Template_Based"], + "template": ["EPI_Template", "T1_Template"], }, - 'timeseries': { - 'roi_paths': ['Avg', 'Voxel', 'SpatialReg'], + "timeseries": { + "roi_paths": ["Avg", "Voxel", "SpatialReg"], }, - 'connectivity_matrix': { - 'using': ['AFNI', 'Nilearn', 'ndmg'], - 'measure': ['Pearson', 'Partial', 'Spearman', 'MGC', - # 'TangentEmbed' # "Skip tangent embedding for now" + "connectivity_matrix": { + "using": ["AFNI", "Nilearn", "ndmg"], + "measure": [ + "Pearson", + "Partial", + "Spearman", + "MGC", + # 'TangentEmbed' # "Skip tangent embedding for now" ], }, - 'Regressors': { - 'CompCor': { - 'degree': int, - 'erode_mask_mm': bool1_1, - 'summary': { - 'method': str, - 'components': int, - 'filter': str, + "Regressors": { + "CompCor": { + "degree": int, + "erode_mask_mm": bool1_1, + "summary": { + "method": str, + "components": int, + "filter": str, }, - 'threshold': str, - 'tissues': [str], - 'extraction_resolution': int + "threshold": str, + "tissues": [str], + "extraction_resolution": int, }, - 'segmentation': { - 'erode_mask': bool1_1, - 'extraction_resolution': Any( - int, float, 'Functional', All(str, Match(RESOLUTION_REGEX)) - ), - 'include_delayed': bool1_1, - 'include_delayed_squared': bool1_1, - 'include_squared': bool1_1, - 'summary': Any( - str, {'components': int, 'method': str} + "segmentation": { + "erode_mask": bool1_1, + "extraction_resolution": Any( + int, float, "Functional", All(str, Match(RESOLUTION_REGEX)) ), + "include_delayed": bool1_1, + "include_delayed_squared": bool1_1, + "include_squared": bool1_1, + "summary": Any(str, {"components": int, "method": str}), }, }, - 'target_space': ['Native', 'Template'] + "target_space": ["Native", "Template"], } -valid_options['space'] = list({option.lower() for option in - valid_options['target_space']}) +valid_options["space"] = list( + {option.lower() for option in valid_options["target_space"]} +) mutex = { # mutually exclusive booleans - 'FSL-BET': { + "FSL-BET": { # exactly zero or one of each of the following can be True for FSL-BET - 'mutex': ['reduce_bias', 'robust', 'padding', 'remove_eyes', - 'surfaces'], + "mutex": ["reduce_bias", "robust", "padding", "remove_eyes", "surfaces"], # the remaining keys: validators for FSL-BET - 'rem': { - 'frac': float, - 'mesh_boolean': bool1_1, - 'outline': bool1_1, - 'radius': int, - 'skull': bool1_1, - 'threshold': bool1_1, - 'vertical_gradient': Range(min=-1, max=1, min_included=False, - 
max_included=False), - 'functional_mean_thr': { - 'run': bool1_1, - 'threshold_value': Maybe(int), + "rem": { + "frac": float, + "mesh_boolean": bool1_1, + "outline": bool1_1, + "radius": int, + "skull": bool1_1, + "threshold": bool1_1, + "vertical_gradient": Range( + min=-1, max=1, min_included=False, max_included=False + ), + "functional_mean_thr": { + "run": bool1_1, + "threshold_value": Maybe(int), }, - 'functional_mean_bias_correction': bool1_1, - } + "functional_mean_bias_correction": bool1_1, + }, } } ANTs_parameter_transforms = { - 'gradientStep': Number, - 'metric': { - 'type': str, - 'metricWeight': int, - 'numberOfBins': int, - 'samplingStrategy': str, - 'samplingPercentage': Number, - 'radius': Number, + "gradientStep": Number, + "metric": { + "type": str, + "metricWeight": int, + "numberOfBins": int, + "samplingStrategy": str, + "samplingPercentage": Number, + "radius": Number, }, - 'convergence': { - 'iteration': All(str, Match(RESOLUTION_REGEX)), - 'convergenceThreshold': Number, - 'convergenceWindowSize': int, + "convergence": { + "iteration": All(str, Match(RESOLUTION_REGEX)), + "convergenceThreshold": Number, + "convergenceWindowSize": int, }, - 'smoothing-sigmas': All(str, Match(RESOLUTION_REGEX)), - 'shrink-factors': All(str, Match(RESOLUTION_REGEX)), - 'use-histogram-matching': bool1_1, - 'updateFieldVarianceInVoxelSpace': Number, - 'totalFieldVarianceInVoxelSpace': Number, - 'winsorize-image-intensities': { - 'lowerQuantile': float, - 'upperQuantile': float, + "smoothing-sigmas": All(str, Match(RESOLUTION_REGEX)), + "shrink-factors": All(str, Match(RESOLUTION_REGEX)), + "use-histogram-matching": bool1_1, + "updateFieldVarianceInVoxelSpace": Number, + "totalFieldVarianceInVoxelSpace": Number, + "winsorize-image-intensities": { + "lowerQuantile": float, + "upperQuantile": float, }, } -ANTs_parameters = [Any( - { - 'collapse-output-transforms': int - }, { - 'dimensionality': int - }, { - 'initial-moving-transform': { - 'initializationFeature': int, +ANTs_parameters = [ + Any( + {"collapse-output-transforms": int}, + {"dimensionality": int}, + { + "initial-moving-transform": { + "initializationFeature": int, + }, }, - }, { - 'transforms': [Any({ - 'Rigid': ANTs_parameter_transforms, - }, { - 'Affine': ANTs_parameter_transforms, - }, { - 'SyN': ANTs_parameter_transforms, - })], - }, { - 'verbose': Any(Coerce(int), In({0, 1})), - }, { - 'float': Any(Coerce(int), In({0, 1})), - }, { - 'masks': { - 'fixed_image_mask': bool1_1, - 'moving_image_mask': bool1_1, + { + "transforms": [ + Any( + { + "Rigid": ANTs_parameter_transforms, + }, + { + "Affine": ANTs_parameter_transforms, + }, + { + "SyN": ANTs_parameter_transforms, + }, + ) + ], + }, + { + "verbose": Any(Coerce(int), In({0, 1})), + }, + { + "float": Any(Coerce(int), In({0, 1})), + }, + { + "masks": { + "fixed_image_mask": bool1_1, + "moving_image_mask": bool1_1, + }, }, - }, dict # TODO: specify other valid ANTs parameters -)] -motion_estimate_filter = Any({ # notch filter with breathing_rate_* set - Required('filter_type'): 'notch', - Required('filter_order'): int, - Required('breathing_rate_min'): Number, - 'breathing_rate_max': Number, - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - 'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # notch filter with manual parameters set - Required('filter_type'): 'notch', - Required('filter_order'): int, - 'breathing_rate_min': None, - 'breathing_rate_max': None, - Required('center_frequency'): Number, - Required('filter_bandwidth'): Number, - 
'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # lowpass filter with breathing_rate_min - Required('filter_type'): 'lowpass', - Required('filter_order'): int, - Required('breathing_rate_min'): Number, - 'breathing_rate_max': Maybe(Number), - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - 'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # lowpass filter with lowpass_cutoff - Required('filter_type'): 'lowpass', - Required('filter_order'): int, - Required('breathing_rate_min', default=None): None, - 'breathing_rate_max': Maybe(Number), - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - Required('lowpass_cutoff'): Number, - 'Name': Maybe(str)}, - msg='`motion_estimate_filter` configuration is invalid.\nSee ' - f'{DOCS_URL_PREFIX}/user/' - 'func#motion-estimate-filter-valid-options for details.\n') -target_space = All(Coerce(ListFromItem), - [All(Title, In(valid_options['target_space']))]) + dict, # TODO: specify other valid ANTs parameters + ) +] +motion_estimate_filter = Any( + { # notch filter with breathing_rate_* set + Required("filter_type"): "notch", + Required("filter_order"): int, + Required("breathing_rate_min"): Number, + "breathing_rate_max": Number, + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # notch filter with manual parameters set + Required("filter_type"): "notch", + Required("filter_order"): int, + "breathing_rate_min": None, + "breathing_rate_max": None, + Required("center_frequency"): Number, + Required("filter_bandwidth"): Number, + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # lowpass filter with breathing_rate_min + Required("filter_type"): "lowpass", + Required("filter_order"): int, + Required("breathing_rate_min"): Number, + "breathing_rate_max": Maybe(Number), + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # lowpass filter with lowpass_cutoff + Required("filter_type"): "lowpass", + Required("filter_order"): int, + Required("breathing_rate_min", default=None): None, + "breathing_rate_max": Maybe(Number), + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + Required("lowpass_cutoff"): Number, + "Name": Maybe(str), + }, + msg="`motion_estimate_filter` configuration is invalid.\nSee " + f"{DOCS_URL_PREFIX}/user/" + "func#motion-estimate-filter-valid-options for details.\n", +) +target_space = All( + Coerce(ListFromItem), [All(Title, In(valid_options["target_space"]))] +) def name_motion_filter(mfilter, mfilters=None): - '''Given a motion filter, create a short string for the filename + """Given a motion filter, create a short string for the filename Parameters ---------- @@ -290,40 +349,43 @@ def name_motion_filter(mfilter, mfilters=None): ... 'breathing_rate_min': 0.19}, [{'Name': 'lowpass2fl0p19'}, ... 
{'Name': 'lowpass2fl0p19dup1'}]) 'lowpass2fl0p19dup2' - ''' + """ if mfilters is None: mfilters = [] - if 'Name' in mfilter: - name = mfilter['Name'] + if "Name" in mfilter: + name = mfilter["Name"] else: - if mfilter['filter_type'] == 'notch': - if mfilter.get('breathing_rate_min'): - range_str = (f'fl{mfilter["breathing_rate_min"]}' - f'fu{mfilter["breathing_rate_max"]}') + if mfilter["filter_type"] == "notch": + if mfilter.get("breathing_rate_min"): + range_str = ( + f'fl{mfilter["breathing_rate_min"]}' + f'fu{mfilter["breathing_rate_max"]}' + ) else: - range_str = (f'fc{mfilter["center_frequency"]}' - f'bw{mfilter["filter_bandwidth"]}') + range_str = ( + f'fc{mfilter["center_frequency"]}' + f'bw{mfilter["filter_bandwidth"]}' + ) + elif mfilter.get("breathing_rate_min"): + range_str = f'fl{mfilter["breathing_rate_min"]}' else: - if mfilter.get('breathing_rate_min'): - range_str = f'fl{mfilter["breathing_rate_min"]}' - else: - range_str = f'fc{mfilter["lowpass_cutoff"]}' - range_str = range_str.replace('.', 'p') + range_str = f'fc{mfilter["lowpass_cutoff"]}' + range_str = range_str.replace(".", "p") name = f'{mfilter["filter_type"]}{mfilter["filter_order"]}{range_str}' - dupes = 'Name' not in mfilter and len([_ for _ in (_.get('Name', '') for - _ in mfilters) if - _.startswith(name)]) + dupes = "Name" not in mfilter and len( + [_ for _ in (_.get("Name", "") for _ in mfilters) if _.startswith(name)] + ) if dupes: - dup = re.search('(?=[A-Za-z0-9]*)(dup[0-9]*)', name) + dup = re.search("(?=[A-Za-z0-9]*)(dup[0-9]*)", name) if dup: # Don't chain 'dup' suffixes - name = name.replace(dup.group(), f'dup{dupes}') + name = name.replace(dup.group(), f"dup{dupes}") else: - name = f'{name}dup{dupes}' + name = f"{name}dup{dupes}" return name def permutation_message(key, options): - '''Function to give a clean, human-readable error message for keys + """Function to give a clean, human-readable error message for keys that accept permutation values Parameters @@ -334,8 +396,9 @@ def permutation_message(key, options): Returns ------- - msg: str''' # noqa: E501 - return f''' + msg: str + """ + return f""" \'{key}\' takes a dictionary with paths to region-of-interest (ROI) NIFTI files (.nii or .nii.gz) as keys and a comma separated string @@ -346,757 +409,852 @@ def permutation_message(key, options): Available analyses for \'{key}\' are {options} -''' +""" def sanitize(filename): - '''Sanitize a filename and replace whitespaces with underscores''' - return re.sub(r'\s+', '_', sanitize_filename(filename)) + """Sanitize a filename and replace whitespaces with underscores""" + return re.sub(r"\s+", "_", sanitize_filename(filename)) -latest_schema = Schema({ - 'FROM': Maybe(str), - 'skip env check': Maybe(bool), # flag for skipping an environment check - 'pipeline_setup': { - 'pipeline_name': All(str, Length(min=1), sanitize), - 'output_directory': { - 'path': str, - 'source_outputs_dir': Maybe(str), - 'pull_source_once': bool1_1, - 'write_func_outputs': bool1_1, - 'write_debugging_outputs': bool1_1, - 'output_tree': str, - 'quality_control': { - 'generate_quality_control_images': bool1_1, - 'generate_xcpqc_files': bool1_1, - }, - 'user_defined': Maybe(str), - }, - 'working_directory': { - 'path': str, - 'remove_working_dir': bool1_1, - }, - 'log_directory': { - 'run_logging': bool1_1, - 'path': str, - 'graphviz': { - 'entire_workflow': { - 'generate': bool, - 'graph2use': Maybe(All(Coerce(ListFromItem), - [All(Lower, - In(('orig', 'hierarchical', 'flat', - 'exec', 'colored')))])), - 'format': 
Maybe(All(Coerce(ListFromItem), - [All(Lower, In(('png', 'svg')))])), - 'simple_form': Maybe(bool)}}, - }, - 'crash_log_directory': { - 'path': Maybe(str), - }, - 'system_config': { - 'fail_fast': bool1_1, - 'FSLDIR': Maybe(str), - 'on_grid': { - 'run': bool1_1, - 'resource_manager': Maybe(str), - 'SGE': { - 'parallel_environment': Maybe(str), - 'queue': Maybe(str), +latest_schema = Schema( + { + "FROM": Maybe(str), + "skip env check": Maybe(bool), # flag for skipping an environment check + "pipeline_setup": { + "pipeline_name": All(str, Length(min=1), sanitize), + "output_directory": { + "path": str, + "source_outputs_dir": Maybe(str), + "pull_source_once": bool1_1, + "write_func_outputs": bool1_1, + "write_debugging_outputs": bool1_1, + "output_tree": str, + "quality_control": { + "generate_quality_control_images": bool1_1, + "generate_xcpqc_files": bool1_1, }, + "user_defined": Maybe(str), }, - 'maximum_memory_per_participant': Number, - 'raise_insufficient': bool1_1, - 'max_cores_per_participant': int, - 'num_ants_threads': int, - 'num_OMP_threads': int, - 'num_participants_at_once': int, - 'random_seed': Maybe(Any( - 'random', - All(int, Range(min=1, max=np.iinfo(np.int32).max)))), - 'observed_usage': { - 'callback_log': Maybe(str), - 'buffer': Number, - }, - }, - 'Amazon-AWS': { - 'aws_output_bucket_credentials': Maybe(str), - 's3_encryption': bool1_1, - }, - 'Debugging': { - 'verbose': bool1_1, - }, - 'freesurfer_dir': str, - 'outdir_ingress': { - 'run': bool1_1, - 'Template': Maybe(str), - }, - }, - 'anatomical_preproc': { - 'run': bool1_1, - 'run_t2': bool1_1, - 'non_local_means_filtering': { - 'run': forkable, - 'noise_model': Maybe(str), - }, - 'n4_bias_field_correction': { - 'run': forkable, - 'shrink_factor': int, - }, - 't1t2_bias_field_correction': Required( - # require 'T1w_brain_ACPC_template' if 'acpc_target' is 'brain' - Any({ - 'run': False, - 'BiasFieldSmoothingSigma': Maybe(int), - }, { - 'run': True, - 'BiasFieldSmoothingSigma': Maybe(int), - },), - ), - - 'acpc_alignment': Required( - # require 'T1w_brain_ACPC_template' and - # 'T2w_brain_ACPC_template' if 'acpc_target' is 'brain' - Any({ - 'run': False, - 'run_before_preproc': Maybe(bool1_1), - 'brain_size': Maybe(int), - 'FOV_crop': Maybe(In({'robustfov', 'flirt'})), - 'acpc_target': Maybe(In(valid_options['acpc']['target'])), - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': Maybe(str), - 'T1w_brain_ACPC_template': Maybe(str), - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - }, { - 'run': True, - 'run_before_preproc': bool1_1, - 'brain_size': int, - 'FOV_crop': In({'robustfov', 'flirt'}), - 'acpc_target': valid_options['acpc']['target'][1], - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': str, - 'T1w_brain_ACPC_template': Maybe(str), - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - }, { - 'run': True, - 'run_before_preproc': bool1_1, - 'brain_size': int, - 'FOV_crop': In({'robustfov', 'flirt'}), - 'acpc_target': valid_options['acpc']['target'][0], - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': str, - 'T1w_brain_ACPC_template': str, - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - },), - msg='\'brain\' requires \'T1w_brain_ACPC_template\' and ' - '\'T2w_brain_ACPC_template\' to ' - 'be populated if \'run\' is not set to Off', - ), - 'brain_extraction': { - 'run': bool1_1, - 'using': [In(valid_options['brain_extraction']['using'])], - 'AFNI-3dSkullStrip': { - 'mask_vol': bool1_1, - 
'shrink_factor': Number, - 'var_shrink_fac': bool1_1, - 'shrink_factor_bot_lim': Number, - 'avoid_vent': bool1_1, - 'n_iterations': int, - 'pushout': bool1_1, - 'touchup': bool1_1, - 'fill_hole': int, - 'NN_smooth': int, - 'smooth_final': int, - 'avoid_eyes': bool1_1, - 'use_edge': bool1_1, - 'exp_frac': Number, - 'push_to_edge': bool1_1, - 'use_skull': bool1_1, - 'perc_int': Number, - 'max_inter_iter': int, - 'fac': Number, - 'blur_fwhm': Number, - 'monkey': bool1_1, - }, - 'FSL-FNIRT': { - 'interpolation': In({ - 'trilinear', 'sinc', 'spline' - }), - }, - 'FSL-BET': { - 'frac': Number, - 'Robustfov': bool1_1, - 'mesh_boolean': bool1_1, - 'outline': bool1_1, - 'padding': bool1_1, - 'radius': int, - 'reduce_bias': bool1_1, - 'remove_eyes': bool1_1, - 'robust': bool1_1, - 'skull': bool1_1, - 'surfaces': bool1_1, - 'threshold': bool1_1, - 'vertical_gradient': Range(min=-1, max=1) + "working_directory": { + "path": str, + "remove_working_dir": bool1_1, }, - 'UNet': { - 'unet_model': Maybe(str), - }, - 'niworkflows-ants': { - 'template_path': Maybe(str), - 'mask_path': Maybe(str), - 'regmask_path': Maybe(str), + "log_directory": { + "run_logging": bool1_1, + "path": str, + "graphviz": { + "entire_workflow": { + "generate": bool, + "graph2use": Maybe( + All( + Coerce(ListFromItem), + [ + All( + Lower, + In( + ( + "orig", + "hierarchical", + "flat", + "exec", + "colored", + ) + ), + ) + ], + ) + ), + "format": Maybe( + All(Coerce(ListFromItem), [All(Lower, In(("png", "svg")))]) + ), + "simple_form": Maybe(bool), + } + }, }, - 'FreeSurfer-BET': { - 'T1w_brain_template_mask_ccs': Maybe(str) + "crash_log_directory": { + "path": Maybe(str), }, - }, - }, - 'segmentation': { - 'run': bool1_1, - 'tissue_segmentation': { - 'using': [In( - {'FSL-FAST', 'FreeSurfer', 'ANTs_Prior_Based', - 'Template_Based'} - )], - 'FSL-FAST': { - 'thresholding': { - 'use': In({'Auto', 'Custom'}), - 'Custom': { - 'CSF_threshold_value': float, - 'WM_threshold_value': float, - 'GM_threshold_value': float, + "system_config": { + "fail_fast": bool1_1, + "FSLDIR": Maybe(str), + "on_grid": { + "run": bool1_1, + "resource_manager": Maybe(str), + "SGE": { + "parallel_environment": Maybe(str), + "queue": Maybe(str), }, }, - 'use_priors': { - 'run': bool1_1, - 'priors_path': Maybe(str), - 'WM_path': Maybe(str), - 'GM_path': Maybe(str), - 'CSF_path': Maybe(str) + "maximum_memory_per_participant": Number, + "raise_insufficient": bool1_1, + "max_cores_per_participant": int, + "num_ants_threads": int, + "num_OMP_threads": int, + "num_participants_at_once": int, + "random_seed": Maybe( + Any("random", All(int, Range(min=1, max=np.iinfo(np.int32).max))) + ), + "observed_usage": { + "callback_log": Maybe(str), + "buffer": Number, }, }, - 'FreeSurfer': { - 'erode': Maybe(int), - 'CSF_label': Maybe([int]), - 'GM_label': Maybe([int]), - 'WM_label': Maybe([int]), + "Amazon-AWS": { + "aws_output_bucket_credentials": Maybe(str), + "s3_encryption": bool1_1, }, - 'ANTs_Prior_Based': { - 'run': forkable, - 'template_brain_list': Maybe(Any([str], [])), - 'template_segmentation_list': Maybe(Any([str], [])), - 'CSF_label': [int], - 'GM_label': [int], - 'WM_label': [int], + "Debugging": { + "verbose": bool1_1, }, - 'Template_Based': { - 'run': forkable, - 'template_for_segmentation': [In( - valid_options['segmentation']['template'] - )], - 'WHITE': Maybe(str), - 'GRAY': Maybe(str), - 'CSF': Maybe(str), + "freesurfer_dir": str, + "outdir_ingress": { + "run": bool1_1, + "Template": Maybe(str), }, }, - }, - 'registration_workflows': { - 
'anatomical_registration': { - 'run': bool1_1, - 'resolution_for_anat': All(str, Match(RESOLUTION_REGEX)), - 'T1w_brain_template': Maybe(str), - 'T1w_template': Maybe(str), - 'T1w_brain_template_mask': Maybe(str), - 'reg_with_skull': bool1_1, - 'registration': { - 'using': [In({'ANTS', 'FSL', 'FSL-linear'})], - 'ANTs': { - 'use_lesion_mask': bool1_1, - 'T1_registration': Maybe(ANTs_parameters), - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc' - }), - }, - 'FSL-FNIRT': { - 'fnirt_config': Maybe(str), - 'ref_resolution': All(str, Match(RESOLUTION_REGEX)), - 'FNIRT_T1w_brain_template': Maybe(str), - 'FNIRT_T1w_template': Maybe(str), - 'interpolation': In({ - 'trilinear', 'sinc', 'spline' - }), - 'identity_matrix': Maybe(str), - 'ref_mask': Maybe(str), - 'ref_mask_res-2': Maybe(str), - 'T1w_template_res-2': Maybe(str), - }, + "anatomical_preproc": { + "run": bool1_1, + "run_t2": bool1_1, + "non_local_means_filtering": { + "run": forkable, + "noise_model": Maybe(str), }, - 'overwrite_transform': { - 'run': bool1_1, - 'using': In({'FSL'}), + "n4_bias_field_correction": { + "run": forkable, + "shrink_factor": int, }, - }, - 'functional_registration': { - 'coregistration': { - 'run': bool1_1, - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'using': str, - 'input': str, - 'cost': str, - 'dof': int, - 'arguments': Maybe(str), - 'func_input_prep': { - 'reg_with_skull': bool1_1, - 'input': [In({ - 'Mean_Functional', 'Selected_Functional_Volume', - 'fmriprep_reference' - })], - 'Mean Functional': { - 'n4_correct_func': bool1_1 + "t1t2_bias_field_correction": Required( + # require 'T1w_brain_ACPC_template' if 'acpc_target' is 'brain' + Any( + { + "run": False, + "BiasFieldSmoothingSigma": Maybe(int), }, - 'Selected Functional Volume': { - 'func_reg_input_volume': int + { + "run": True, + "BiasFieldSmoothingSigma": Maybe(int), }, + ), + ), + "acpc_alignment": Required( + # require 'T1w_brain_ACPC_template' and + # 'T2w_brain_ACPC_template' if 'acpc_target' is 'brain' + Any( + { + "run": False, + "run_before_preproc": Maybe(bool1_1), + "brain_size": Maybe(int), + "FOV_crop": Maybe(In({"robustfov", "flirt"})), + "acpc_target": Maybe(In(valid_options["acpc"]["target"])), + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": Maybe(str), + "T1w_brain_ACPC_template": Maybe(str), + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + { + "run": True, + "run_before_preproc": bool1_1, + "brain_size": int, + "FOV_crop": In({"robustfov", "flirt"}), + "acpc_target": valid_options["acpc"]["target"][1], + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": str, + "T1w_brain_ACPC_template": Maybe(str), + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + { + "run": True, + "run_before_preproc": bool1_1, + "brain_size": int, + "FOV_crop": In({"robustfov", "flirt"}), + "acpc_target": valid_options["acpc"]["target"][0], + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": str, + "T1w_brain_ACPC_template": str, + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + ), + msg="'brain' requires 'T1w_brain_ACPC_template' and " + "'T2w_brain_ACPC_template' to " + "be populated if 'run' is not set to Off", + ), + "brain_extraction": { + "run": bool1_1, + "using": [In(valid_options["brain_extraction"]["using"])], + "AFNI-3dSkullStrip": { + "mask_vol": bool1_1, + "shrink_factor": Number, + "var_shrink_fac": bool1_1, + "shrink_factor_bot_lim": Number, + "avoid_vent": bool1_1, + "n_iterations": 
int, + "pushout": bool1_1, + "touchup": bool1_1, + "fill_hole": int, + "NN_smooth": int, + "smooth_final": int, + "avoid_eyes": bool1_1, + "use_edge": bool1_1, + "exp_frac": Number, + "push_to_edge": bool1_1, + "use_skull": bool1_1, + "perc_int": Number, + "max_inter_iter": int, + "fac": Number, + "blur_fwhm": Number, + "monkey": bool1_1, }, - 'boundary_based_registration': { - 'run': forkable, - 'bbr_schedule': str, - 'bbr_wm_map': In({'probability_map', 'partial_volume_map'}), - 'bbr_wm_mask_args': str, - 'reference': In({'whole-head', 'brain'}) + "FSL-FNIRT": { + "interpolation": In({"trilinear", "sinc", "spline"}), }, - }, - 'EPI_registration': { - 'run': bool1_1, - 'using': [In({'ANTS', 'FSL', 'FSL-linear'})], - 'EPI_template': Maybe(str), - 'EPI_template_mask': Maybe(str), - 'ANTs': { - 'parameters': Maybe(ANTs_parameters), - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc' - }), + "FSL-BET": { + "frac": Number, + "Robustfov": bool1_1, + "mesh_boolean": bool1_1, + "outline": bool1_1, + "padding": bool1_1, + "radius": int, + "reduce_bias": bool1_1, + "remove_eyes": bool1_1, + "robust": bool1_1, + "skull": bool1_1, + "surfaces": bool1_1, + "threshold": bool1_1, + "vertical_gradient": Range(min=-1, max=1), }, - 'FSL-FNIRT': { - 'fnirt_config': Maybe(str), - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'identity_matrix': Maybe(str), + "UNet": { + "unet_model": Maybe(str), }, - }, - 'func_registration_to_template': { - 'run': bool1_1, - 'run_EPI': bool1_1, - 'output_resolution': { - 'func_preproc_outputs': All( - str, Match(RESOLUTION_REGEX)), - 'func_derivative_outputs': All( - str, Match(RESOLUTION_REGEX) - ), + "niworkflows-ants": { + "template_path": Maybe(str), + "mask_path": Maybe(str), + "regmask_path": Maybe(str), }, - 'target_template': { - 'using': [In({'T1_template', 'EPI_template'})], - 'T1_template': { - 'T1w_brain_template_funcreg': Maybe(str), - 'T1w_template_funcreg': Maybe(str), - 'T1w_brain_template_mask_funcreg': Maybe(str), - 'T1w_template_for_resample': Maybe(str), + "FreeSurfer-BET": {"T1w_brain_template_mask_ccs": Maybe(str)}, + }, + }, + "segmentation": { + "run": bool1_1, + "tissue_segmentation": { + "using": [ + In({"FSL-FAST", "FreeSurfer", "ANTs_Prior_Based", "Template_Based"}) + ], + "FSL-FAST": { + "thresholding": { + "use": In({"Auto", "Custom"}), + "Custom": { + "CSF_threshold_value": float, + "WM_threshold_value": float, + "GM_threshold_value": float, + }, }, - 'EPI_template': { - 'EPI_template_funcreg': Maybe(str), - 'EPI_template_mask_funcreg': Maybe(str), - 'EPI_template_for_resample': Maybe(str) + "use_priors": { + "run": bool1_1, + "priors_path": Maybe(str), + "WM_path": Maybe(str), + "GM_path": Maybe(str), + "CSF_path": Maybe(str), }, }, - 'ANTs_pipelines': { - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc'}) + "FreeSurfer": { + "erode": Maybe(int), + "CSF_label": Maybe([int]), + "GM_label": Maybe([int]), + "WM_label": Maybe([int]), }, - 'FNIRT_pipelines': { - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'identity_matrix': Maybe(str), + "ANTs_Prior_Based": { + "run": forkable, + "template_brain_list": Maybe(Any([str], [])), + "template_segmentation_list": Maybe(Any([str], [])), + "CSF_label": [int], + "GM_label": [int], + "WM_label": [int], }, - 'apply_transform': { - 'using': In({'default', 'abcd', 'dcan_nhp', - 'single_step_resampling_from_stc'}), + "Template_Based": { + "run": forkable, + "template_for_segmentation": [ + In(valid_options["segmentation"]["template"]) + ], + "WHITE": 
Maybe(str), + "GRAY": Maybe(str), + "CSF": Maybe(str), }, }, }, - }, - 'surface_analysis': { - 'abcd_prefreesurfer_prep':{ - 'run': bool1_1, - }, - 'freesurfer': { - 'run_reconall': bool1_1, - 'reconall_args': Maybe(str), - # 'generate_masks': bool1_1, - 'ingress_reconall': bool1_1, - }, - 'post_freesurfer': { - 'run': bool1_1, - 'surf_atlas_dir': Maybe(str), - 'gray_ordinates_dir': Maybe(str), - 'gray_ordinates_res': Maybe(int), - 'high_res_mesh': Maybe(int), - 'low_res_mesh': Maybe(int), - 'subcortical_gray_labels': Maybe(str), - 'freesurfer_labels': Maybe(str), - 'fmri_res': Maybe(int), - 'smooth_fwhm': Maybe(int), - }, - 'amplitude_low_frequency_fluctuation': { - 'run': bool1_1, - }, - 'regional_homogeneity': { - 'run': bool1_1, - }, - 'surface_connectivity': { - 'run': bool1_1, - 'surface_parcellation_template': Maybe(str), - }, - }, - 'longitudinal_template_generation': { - 'run': bool1_1, - 'average_method': In({'median', 'mean', 'std'}), - 'dof': In({12, 9, 7, 6}), - 'interp': In({'trilinear', 'nearestneighbour', 'sinc', 'spline'}), - 'cost': In({ - 'corratio', 'mutualinfo', 'normmi', 'normcorr', 'leastsq', - 'labeldiff', 'bbr'}), - 'thread_pool': int, - 'convergence_threshold': Number, - }, - 'functional_preproc': { - 'run': bool1_1, - 'truncation': { - 'start_tr': int, - 'stop_tr': Maybe(Any(int, All(Capitalize, 'End'))) - }, - 'update_header': { - 'run': bool1_1, - }, - 'scaling': { - 'run': bool1_1, - 'scaling_factor': Number - }, - 'despiking': { - 'run': forkable, - 'space': In({'native', 'template'}) - }, - 'slice_timing_correction': { - 'run': forkable, - 'tpattern': Maybe(str), - 'tzero': Maybe(int), - }, - 'motion_estimates_and_correction': { - 'run': bool1_1, - 'motion_estimates': { - 'calculate_motion_first': bool1_1, - 'calculate_motion_after': bool1_1, + "registration_workflows": { + "anatomical_registration": { + "run": bool1_1, + "resolution_for_anat": All(str, Match(RESOLUTION_REGEX)), + "T1w_brain_template": Maybe(str), + "T1w_template": Maybe(str), + "T1w_brain_template_mask": Maybe(str), + "reg_with_skull": bool1_1, + "registration": { + "using": [In({"ANTS", "FSL", "FSL-linear"})], + "ANTs": { + "use_lesion_mask": bool1_1, + "T1_registration": Maybe(ANTs_parameters), + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ), + }, + "FSL-FNIRT": { + "fnirt_config": Maybe(str), + "ref_resolution": All(str, Match(RESOLUTION_REGEX)), + "FNIRT_T1w_brain_template": Maybe(str), + "FNIRT_T1w_template": Maybe(str), + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + "ref_mask": Maybe(str), + "ref_mask_res-2": Maybe(str), + "T1w_template_res-2": Maybe(str), + }, + }, + "overwrite_transform": { + "run": bool1_1, + "using": In({"FSL"}), + }, }, - 'motion_correction': { - 'using': Optional(All(Coerce(ListFromItem), - Length(min=0, max=1, - msg='Forking is currently broken for this option. ' - 'Please use separate configs if you want to ' - 'use each of 3dvolreg and mcflirt. 
Follow ' - 'https://github.com/FCP-INDI/C-PAC/issues/1935 ' - 'to see when this issue is resolved.'), - [In(valid_options['motion_correction'])])), - 'AFNI-3dvolreg': { - 'functional_volreg_twopass': bool1_1, + "functional_registration": { + "coregistration": { + "run": bool1_1, + "interpolation": In({"trilinear", "sinc", "spline"}), + "using": str, + "input": str, + "cost": str, + "dof": int, + "arguments": Maybe(str), + "func_input_prep": { + "reg_with_skull": bool1_1, + "input": [ + In( + { + "Mean_Functional", + "Selected_Functional_Volume", + "fmriprep_reference", + } + ) + ], + "Mean Functional": {"n4_correct_func": bool1_1}, + "Selected Functional Volume": {"func_reg_input_volume": int}, + }, + "boundary_based_registration": { + "run": forkable, + "bbr_schedule": str, + "bbr_wm_map": In({"probability_map", "partial_volume_map"}), + "bbr_wm_mask_args": str, + "reference": In({"whole-head", "brain"}), + }, + }, + "EPI_registration": { + "run": bool1_1, + "using": [In({"ANTS", "FSL", "FSL-linear"})], + "EPI_template": Maybe(str), + "EPI_template_mask": Maybe(str), + "ANTs": { + "parameters": Maybe(ANTs_parameters), + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ), + }, + "FSL-FNIRT": { + "fnirt_config": Maybe(str), + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + }, + }, + "func_registration_to_template": { + "run": bool1_1, + "run_EPI": bool1_1, + "output_resolution": { + "func_preproc_outputs": All(str, Match(RESOLUTION_REGEX)), + "func_derivative_outputs": All(str, Match(RESOLUTION_REGEX)), + }, + "target_template": { + "using": [In({"T1_template", "EPI_template"})], + "T1_template": { + "T1w_brain_template_funcreg": Maybe(str), + "T1w_template_funcreg": Maybe(str), + "T1w_brain_template_mask_funcreg": Maybe(str), + "T1w_template_for_resample": Maybe(str), + }, + "EPI_template": { + "EPI_template_funcreg": Maybe(str), + "EPI_template_mask_funcreg": Maybe(str), + "EPI_template_for_resample": Maybe(str), + }, + }, + "ANTs_pipelines": { + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ) + }, + "FNIRT_pipelines": { + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + }, + "apply_transform": { + "using": In( + { + "default", + "abcd", + "dcan_nhp", + "single_step_resampling_from_stc", + } + ), + }, }, - 'motion_correction_reference': [In({ - 'mean', 'median', 'selected_volume', - 'fmriprep_reference'})], - 'motion_correction_reference_volume': int, }, - 'motion_estimate_filter': Required( - Any({'run': forkable, - 'filters': [motion_estimate_filter]}, - {'run': All(forkable, [In([False], [])]), - 'filters': Maybe(list)}) - ), }, - 'distortion_correction': { - 'run': forkable, - 'using': [In(['PhaseDiff', 'Blip', 'Blip-FSL-TOPUP'])], - 'PhaseDiff': { - 'fmap_skullstrip_option': In(['BET', 'AFNI']), - 'fmap_skullstrip_BET_frac': float, - 'fmap_skullstrip_AFNI_threshold': float, + "surface_analysis": { + "abcd_prefreesurfer_prep": { + "run": bool1_1, }, - 'Blip-FSL-TOPUP': { - 'warpres': int, - 'subsamp': int, - 'fwhm': int, - 'miter': int, - 'lambda': int, - 'ssqlambda': int, - 'regmod': In({'bending_energy', 'membrane_energy'}), - 'estmov': int, - 'minmet': int, - 'splineorder': int, - 'numprec': str, - 'interp': In({'spline', 'linear'}), - 'scale': int, - 'regrid': int - } - }, - 'func_masking': { - 'run': bool1_1, - 'using': [In( - ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', - 'Anatomical_Based', 'Anatomical_Resampled', - 'CCS_Anatomical_Refined'] - )], - 
# handle validating mutually-exclusive booleans for FSL-BET - # functional_mean_boolean must be True if one of the mutually- - # exclusive options are - # see mutex definition for more definition - 'FSL-BET': Maybe(Any(*( - # exactly one mutually exclusive option on - [{k: d[k] for d in r for k in d} for r in [[ - { - **mutex['FSL-BET']['rem'], - 'functional_mean_boolean': True, - k1: True, - k2: False - } for k2 in mutex['FSL-BET']['mutex'] if k2 != k1 - ] for k1 in mutex['FSL-BET']['mutex']]] + - # no mutually-exclusive options on - [{ - **mutex['FSL-BET']['rem'], - 'functional_mean_boolean': bool1_1, - **{k: False for k in mutex['FSL-BET']['mutex']} - }])) - ), - 'FSL_AFNI': { - 'bold_ref': Maybe(str), - 'brain_mask': Maybe(str), - 'brain_probseg': Maybe(str), + "freesurfer": { + "run_reconall": bool1_1, + "reconall_args": Maybe(str), + # 'generate_masks': bool1_1, + "ingress_reconall": bool1_1, }, - 'Anatomical_Refined': { - 'anatomical_mask_dilation': Maybe(bool1_1), + "post_freesurfer": { + "run": bool1_1, + "surf_atlas_dir": Maybe(str), + "gray_ordinates_dir": Maybe(str), + "gray_ordinates_res": Maybe(int), + "high_res_mesh": Maybe(int), + "low_res_mesh": Maybe(int), + "subcortical_gray_labels": Maybe(str), + "freesurfer_labels": Maybe(str), + "fmri_res": Maybe(int), + "smooth_fwhm": Maybe(int), + }, + "amplitude_low_frequency_fluctuation": { + "run": bool1_1, + }, + "regional_homogeneity": { + "run": bool1_1, + }, + "surface_connectivity": { + "run": bool1_1, + "surface_parcellation_template": Maybe(str), }, - 'apply_func_mask_in_native_space': bool1_1, - }, - 'generate_func_mean': { - 'run': bool1_1, - }, - 'normalize_func': { - 'run': bool1_1, - }, - 'coreg_prep': { - 'run': bool1_1, }, - }, - 'nuisance_corrections': { - '1-ICA-AROMA': { - 'run': forkable, - 'denoising_type': In({'aggr', 'nonaggr'}), + "longitudinal_template_generation": { + "run": bool1_1, + "average_method": In({"median", "mean", "std"}), + "dof": In({12, 9, 7, 6}), + "interp": In({"trilinear", "nearestneighbour", "sinc", "spline"}), + "cost": In( + { + "corratio", + "mutualinfo", + "normmi", + "normcorr", + "leastsq", + "labeldiff", + "bbr", + } + ), + "thread_pool": int, + "convergence_threshold": Number, }, - '2-nuisance_regression': { - 'run': forkable, - 'space': All(Coerce(ItemFromList), - Lower, In({'native', 'template'})), - 'create_regressors': bool1_1, - 'ingress_regressors': { - 'run': bool1_1, - 'Regressors': { - 'Name': Maybe(str), - 'Columns': [str]}, + "functional_preproc": { + "run": bool1_1, + "truncation": { + "start_tr": int, + "stop_tr": Maybe(Any(int, All(Capitalize, "End"))), + }, + "update_header": { + "run": bool1_1, + }, + "scaling": {"run": bool1_1, "scaling_factor": Number}, + "despiking": {"run": forkable, "space": In({"native", "template"})}, + "slice_timing_correction": { + "run": forkable, + "tpattern": Maybe(str), + "tzero": Maybe(int), + }, + "motion_estimates_and_correction": { + "run": bool1_1, + "motion_estimates": { + "calculate_motion_first": bool1_1, + "calculate_motion_after": bool1_1, + }, + "motion_correction": { + "using": Optional( + All( + Coerce(ListFromItem), + Length( + min=0, + max=1, + msg="Forking is currently broken for this option. " + "Please use separate configs if you want to " + "use each of 3dvolreg and mcflirt. 
Follow " + "https://github.com/FCP-INDI/C-PAC/issues/1935 " + "to see when this issue is resolved.", + ), + [In(valid_options["motion_correction"])], + ) + ), + "AFNI-3dvolreg": { + "functional_volreg_twopass": bool1_1, + }, + "motion_correction_reference": [ + In({"mean", "median", "selected_volume", "fmriprep_reference"}) + ], + "motion_correction_reference_volume": int, + }, + "motion_estimate_filter": Required( + Any( + {"run": forkable, "filters": [motion_estimate_filter]}, + { + "run": All(forkable, [In([False], [])]), + "filters": Maybe(list), + }, + ) + ), }, - 'Regressors': Maybe([Schema({ - 'Name': Required(str), - 'Censor': { - 'method': str, - 'thresholds': [{ - 'type': str, - 'value': float, - }], - 'number_of_previous_trs_to_censor': Maybe(int), - 'number_of_subsequent_trs_to_censor': Maybe(int), + "distortion_correction": { + "run": forkable, + "using": [In(["PhaseDiff", "Blip", "Blip-FSL-TOPUP"])], + "PhaseDiff": { + "fmap_skullstrip_option": In(["BET", "AFNI"]), + "fmap_skullstrip_BET_frac": float, + "fmap_skullstrip_AFNI_threshold": float, }, - 'Motion': { - 'include_delayed': bool1_1, - 'include_squared': bool1_1, - 'include_delayed_squared': bool1_1 + "Blip-FSL-TOPUP": { + "warpres": int, + "subsamp": int, + "fwhm": int, + "miter": int, + "lambda": int, + "ssqlambda": int, + "regmod": In({"bending_energy", "membrane_energy"}), + "estmov": int, + "minmet": int, + "splineorder": int, + "numprec": str, + "interp": In({"spline", "linear"}), + "scale": int, + "regrid": int, }, - 'aCompCor': valid_options['Regressors']['CompCor'], - 'tCompCor': valid_options['Regressors']['CompCor'], - 'CerebrospinalFluid': valid_options[ - 'Regressors' - ]['segmentation'], - 'WhiteMatter': valid_options[ - 'Regressors' - ]['segmentation'], - 'GreyMatter': valid_options[ - 'Regressors' - ]['segmentation'], - 'GlobalSignal': {'summary': str}, - 'PolyOrt': {'degree': int}, - 'Bandpass': { - 'bottom_frequency': float, - 'top_frequency': float, - 'method': str, - } # how to check if [0] is > than [1]? 
- }, extra=ALLOW_EXTRA)]), - 'lateral_ventricles_mask': Maybe(str), - 'bandpass_filtering_order': Maybe( - In({'After', 'Before'})), - 'regressor_masks': { - 'erode_anatomical_brain_mask': { - 'run': bool1_1, - 'brain_mask_erosion_prop': Maybe(Number), - 'brain_mask_erosion_mm': Maybe(Number), - 'brain_erosion_mm': Maybe(Number) + }, + "func_masking": { + "run": bool1_1, + "using": [ + In( + [ + "AFNI", + "FSL", + "FSL_AFNI", + "Anatomical_Refined", + "Anatomical_Based", + "Anatomical_Resampled", + "CCS_Anatomical_Refined", + ] + ) + ], + # handle validating mutually-exclusive booleans for FSL-BET + # functional_mean_boolean must be True if one of the mutually- + # exclusive options are + # see mutex definition for more definition + "FSL-BET": Maybe( + Any( + *( + # exactly one mutually exclusive option on + [ + {k: d[k] for d in r for k in d} + for r in [ + [ + { + **mutex["FSL-BET"]["rem"], + "functional_mean_boolean": True, + k1: True, + k2: False, + } + for k2 in mutex["FSL-BET"]["mutex"] + if k2 != k1 + ] + for k1 in mutex["FSL-BET"]["mutex"] + ] + ] + + + # no mutually-exclusive options on + [ + { + **mutex["FSL-BET"]["rem"], + "functional_mean_boolean": bool1_1, + **{k: False for k in mutex["FSL-BET"]["mutex"]}, + } + ] + ) + ) + ), + "FSL_AFNI": { + "bold_ref": Maybe(str), + "brain_mask": Maybe(str), + "brain_probseg": Maybe(str), }, - 'erode_csf': { - 'run': bool1_1, - 'csf_erosion_prop': Maybe(Number), - 'csf_mask_erosion_mm': Maybe(Number), - 'csf_erosion_mm': Maybe(Number), + "Anatomical_Refined": { + "anatomical_mask_dilation": Maybe(bool1_1), }, - 'erode_wm': { - 'run': bool1_1, - 'wm_erosion_prop': Maybe(Number), - 'wm_mask_erosion_mm': Maybe(Number), - 'wm_erosion_mm': Maybe(Number), + "apply_func_mask_in_native_space": bool1_1, + }, + "generate_func_mean": { + "run": bool1_1, + }, + "normalize_func": { + "run": bool1_1, + }, + "coreg_prep": { + "run": bool1_1, + }, + }, + "nuisance_corrections": { + "1-ICA-AROMA": { + "run": forkable, + "denoising_type": In({"aggr", "nonaggr"}), + }, + "2-nuisance_regression": { + "run": forkable, + "space": All(Coerce(ItemFromList), Lower, In({"native", "template"})), + "create_regressors": bool1_1, + "ingress_regressors": { + "run": bool1_1, + "Regressors": {"Name": Maybe(str), "Columns": [str]}, + }, + "Regressors": Maybe( + [ + Schema( + { + "Name": Required(str), + "Censor": { + "method": str, + "thresholds": [ + { + "type": str, + "value": float, + } + ], + "number_of_previous_trs_to_censor": Maybe(int), + "number_of_subsequent_trs_to_censor": Maybe(int), + }, + "Motion": { + "include_delayed": bool1_1, + "include_squared": bool1_1, + "include_delayed_squared": bool1_1, + }, + "aCompCor": valid_options["Regressors"]["CompCor"], + "tCompCor": valid_options["Regressors"]["CompCor"], + "CerebrospinalFluid": valid_options["Regressors"][ + "segmentation" + ], + "WhiteMatter": valid_options["Regressors"][ + "segmentation" + ], + "GreyMatter": valid_options["Regressors"][ + "segmentation" + ], + "GlobalSignal": {"summary": str}, + "PolyOrt": {"degree": int}, + "Bandpass": { + "bottom_frequency": float, + "top_frequency": float, + "method": str, + }, # how to check if [0] is > than [1]? 
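+                            # (one possible, untested answer to the question
+                            # above: voluptuous lets All() pair a schema with
+                            # a custom callable, so a hypothetical helper
+                            # could raise Invalid when bottom_frequency >=
+                            # top_frequency, e.g.
+                            #     "Bandpass": All({...}, _check_band_order),
+                            # and inverted bounds would fail validation)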
+ }, + extra=ALLOW_EXTRA, + ) + ] + ), + "lateral_ventricles_mask": Maybe(str), + "bandpass_filtering_order": Maybe(In({"After", "Before"})), + "regressor_masks": { + "erode_anatomical_brain_mask": { + "run": bool1_1, + "brain_mask_erosion_prop": Maybe(Number), + "brain_mask_erosion_mm": Maybe(Number), + "brain_erosion_mm": Maybe(Number), + }, + "erode_csf": { + "run": bool1_1, + "csf_erosion_prop": Maybe(Number), + "csf_mask_erosion_mm": Maybe(Number), + "csf_erosion_mm": Maybe(Number), + }, + "erode_wm": { + "run": bool1_1, + "wm_erosion_prop": Maybe(Number), + "wm_mask_erosion_mm": Maybe(Number), + "wm_erosion_mm": Maybe(Number), + }, + "erode_gm": { + "run": bool1_1, + "gm_erosion_prop": Maybe(Number), + "gm_mask_erosion_mm": Maybe(Number), + "gm_erosion_mm": Maybe(Number), + }, }, - 'erode_gm': { - 'run': bool1_1, - 'gm_erosion_prop': Maybe(Number), - 'gm_mask_erosion_mm': Maybe(Number), - 'gm_erosion_mm': Maybe(Number), - } }, }, - }, - 'amplitude_low_frequency_fluctuation': { - 'run': bool1_1, - 'target_space': target_space, - 'highpass_cutoff': [float], - 'lowpass_cutoff': [float], - }, - 'voxel_mirrored_homotopic_connectivity': { - 'run': bool1_1, - 'symmetric_registration': { - 'T1w_brain_template_symmetric': Maybe(str), - 'T1w_brain_template_symmetric_funcreg': Maybe(str), - 'T1w_brain_template_symmetric_for_resample': Maybe(str), - 'T1w_template_symmetric': Maybe(str), - 'T1w_template_symmetric_funcreg': Maybe(str), - 'T1w_template_symmetric_for_resample': Maybe(str), - 'dilated_symmetric_brain_mask': Maybe(str), - 'dilated_symmetric_brain_mask_for_resample': Maybe(str), + "amplitude_low_frequency_fluctuation": { + "run": bool1_1, + "target_space": target_space, + "highpass_cutoff": [float], + "lowpass_cutoff": [float], }, - }, - 'regional_homogeneity': { - 'run': bool1_1, - 'target_space': target_space, - 'cluster_size': In({7, 19, 27}), - }, - 'post_processing': { - 'spatial_smoothing': { - 'run': bool1_1, - 'output': [In({'smoothed', 'nonsmoothed'})], - 'smoothing_method': [In({'FSL', 'AFNI'})], - 'fwhm': [int] + "voxel_mirrored_homotopic_connectivity": { + "run": bool1_1, + "symmetric_registration": { + "T1w_brain_template_symmetric": Maybe(str), + "T1w_brain_template_symmetric_funcreg": Maybe(str), + "T1w_brain_template_symmetric_for_resample": Maybe(str), + "T1w_template_symmetric": Maybe(str), + "T1w_template_symmetric_funcreg": Maybe(str), + "T1w_template_symmetric_for_resample": Maybe(str), + "dilated_symmetric_brain_mask": Maybe(str), + "dilated_symmetric_brain_mask_for_resample": Maybe(str), + }, }, - 'z-scoring': { - 'run': bool1_1, - 'output': [In({'z-scored', 'raw'})], + "regional_homogeneity": { + "run": bool1_1, + "target_space": target_space, + "cluster_size": In({7, 19, 27}), }, - }, - 'timeseries_extraction': { - 'run': bool1_1, - Optional('roi_paths_fully_specified'): bool1_1, - 'tse_roi_paths': Optional( - Maybe({ - str: In({', '.join( - list(options) - ) for options in list(chain.from_iterable([list( - permutations(valid_options['timeseries']['roi_paths'], - number_of) - ) for number_of in range(1, 6)]))}), - }), - msg=permutation_message( - 'tse_roi_paths', valid_options['timeseries']['roi_paths']) - ), - 'realignment': In({'ROI_to_func', 'func_to_ROI'}), - 'connectivity_matrix': { - option: Maybe([In(valid_options['connectivity_matrix'][option])]) - for option in ['using', 'measure'] + "post_processing": { + "spatial_smoothing": { + "run": bool1_1, + "output": [In({"smoothed", "nonsmoothed"})], + "smoothing_method": [In({"FSL", "AFNI"})], + "fwhm": 
[int], + }, + "z-scoring": { + "run": bool1_1, + "output": [In({"z-scored", "raw"})], + }, }, - }, - 'seed_based_correlation_analysis': { - 'run': bool1_1, - Optional('roi_paths_fully_specified'): bool1_1, - 'sca_roi_paths': Optional( - Maybe({ - str: In({', '.join(list( - options - )) for options in list(chain.from_iterable([list( - permutations(valid_options['sca']['roi_paths'], number_of) - ) for number_of in range(1, 4)]))}) - }), - msg=permutation_message( - 'sca_roi_paths', valid_options['sca']['roi_paths']) - ), - 'norm_timeseries_for_DR': bool1_1, - }, - 'network_centrality': { - 'run': bool1_1, - 'memory_allocation': Number, - 'template_specification_file': Maybe(str), - 'degree_centrality': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In( - valid_options['centrality']['threshold_options']), - 'correlation_threshold': Range(min=-1, max=1) + "timeseries_extraction": { + "run": bool1_1, + Optional("roi_paths_fully_specified"): bool1_1, + "tse_roi_paths": Optional( + Maybe( + { + str: In( + { + ", ".join(list(options)) + for options in list( + chain.from_iterable( + [ + list( + permutations( + valid_options["timeseries"][ + "roi_paths" + ], + number_of, + ) + ) + for number_of in range(1, 6) + ] + ) + ) + } + ), + } + ), + msg=permutation_message( + "tse_roi_paths", valid_options["timeseries"]["roi_paths"] + ), + ), + "realignment": In({"ROI_to_func", "func_to_ROI"}), + "connectivity_matrix": { + option: Maybe([In(valid_options["connectivity_matrix"][option])]) + for option in ["using", "measure"] + }, }, - 'eigenvector_centrality': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In( - valid_options['centrality']['threshold_options'] + "seed_based_correlation_analysis": { + "run": bool1_1, + Optional("roi_paths_fully_specified"): bool1_1, + "sca_roi_paths": Optional( + Maybe( + { + str: In( + { + ", ".join(list(options)) + for options in list( + chain.from_iterable( + [ + list( + permutations( + valid_options["sca"]["roi_paths"], + number_of, + ) + ) + for number_of in range(1, 4) + ] + ) + ) + } + ) + } + ), + msg=permutation_message( + "sca_roi_paths", valid_options["sca"]["roi_paths"] + ), ), - 'correlation_threshold': Range(min=-1, max=1) + "norm_timeseries_for_DR": bool1_1, }, - 'local_functional_connectivity_density': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In([ - o for o in valid_options['centrality']['threshold_options'] if - o != 'Sparsity threshold' - ]), - 'correlation_threshold': Range(min=-1, max=1) + "network_centrality": { + "run": bool1_1, + "memory_allocation": Number, + "template_specification_file": Maybe(str), + "degree_centrality": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + valid_options["centrality"]["threshold_options"] + ), + "correlation_threshold": Range(min=-1, max=1), + }, + "eigenvector_centrality": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + valid_options["centrality"]["threshold_options"] + ), + "correlation_threshold": Range(min=-1, max=1), + }, + "local_functional_connectivity_density": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + [ + o + for o in valid_options["centrality"]["threshold_options"] + if o != "Sparsity threshold" + ] + ), 
+ "correlation_threshold": Range(min=-1, max=1), + }, }, - }, - 'PyPEER': { - 'run': bool1_1, - 'eye_scan_names': Maybe(Any([str], [])), - 'data_scan_names': Maybe(Any([str], [])), - 'eye_mask_path': Maybe(str), - 'stimulus_path': Maybe(str), - 'minimal_nuisance_correction': { - 'peer_gsr': bool1_1, - 'peer_scrub': bool1_1, - 'scrub_thresh': float, + "PyPEER": { + "run": bool1_1, + "eye_scan_names": Maybe(Any([str], [])), + "data_scan_names": Maybe(Any([str], [])), + "eye_mask_path": Maybe(str), + "stimulus_path": Maybe(str), + "minimal_nuisance_correction": { + "peer_gsr": bool1_1, + "peer_scrub": bool1_1, + "scrub_thresh": float, + }, }, - }, -}) + } +) def schema(config_dict): - '''Validate a pipeline configuration against the latest validation schema + """Validate a pipeline configuration against the latest validation schema by first applying backwards-compatibility patches, then applying Voluptuous validation, then handling complex configuration interaction checks before returning validated config_dict. @@ -1108,106 +1266,140 @@ def schema(config_dict): Returns ------- dict - ''' + """ from CPAC.utils.utils import _changes_1_8_0_to_1_8_1 + try: - partially_validated = latest_schema( - _changes_1_8_0_to_1_8_1(config_dict)) + partially_validated = latest_schema(_changes_1_8_0_to_1_8_1(config_dict)) except MultipleInvalid as multiple_invalid: - if (multiple_invalid.path == ['nuisance_corrections', - '2-nuisance_regression', 'space'] and - isinstance(multiple_invalid.errors[0], CoerceInvalid)): + if multiple_invalid.path == [ + "nuisance_corrections", + "2-nuisance_regression", + "space", + ] and isinstance(multiple_invalid.errors[0], CoerceInvalid): raise CoerceInvalid( 'Nusiance regression space is not forkable. Please choose ' f'only one of {valid_options["space"]}', - path=multiple_invalid.path) from multiple_invalid + path=multiple_invalid.path, + ) from multiple_invalid raise multiple_invalid try: - if (partially_validated['registration_workflows'][ - 'functional_registration' - ]['func_registration_to_template']['apply_transform'][ - 'using' - ] == 'single_step_resampling_from_stc'): - or_else = ('or choose a different option for ' - '``registration_workflows: functional_registration: ' - 'func_registration_to_template: apply_transform: ' - 'using``') - if True in partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['run'] and partially_validated[ - 'nuisance_corrections' - ]['2-nuisance_regression']['space'] != 'template': + if ( + partially_validated["registration_workflows"]["functional_registration"][ + "func_registration_to_template" + ]["apply_transform"]["using"] + == "single_step_resampling_from_stc" + ): + or_else = ( + "or choose a different option for " + "``registration_workflows: functional_registration: " + "func_registration_to_template: apply_transform: " + "using``" + ) + if ( + True + in partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "run" + ] + and partially_validated["nuisance_corrections"][ + "2-nuisance_regression" + ]["space"] + != "template" + ): raise ExclusiveInvalid( - '``single_step_resampling_from_stc`` requires ' - 'template-space nuisance regression. Either set ' - '``nuisance_corrections: 2-nuisance_regression: space`` ' - f'to ``template`` {or_else}') - if any(registration != 'ANTS' for registration in - partially_validated['registration_workflows'][ - 'anatomical_registration']['registration']['using']): + "``single_step_resampling_from_stc`` requires " + "template-space nuisance regression. 
Either set " + "``nuisance_corrections: 2-nuisance_regression: space`` " + f"to ``template`` {or_else}" + ) + if any( + registration != "ANTS" + for registration in partially_validated["registration_workflows"][ + "anatomical_registration" + ]["registration"]["using"] + ): raise ExclusiveInvalid( - '``single_step_resampling_from_stc`` requires ' - 'ANTS registration. Either set ' - '``registration_workflows: anatomical_registration: ' - f'registration: using`` to ``ANTS`` {or_else}') + "``single_step_resampling_from_stc`` requires " + "ANTS registration. Either set " + "``registration_workflows: anatomical_registration: " + f"registration: using`` to ``ANTS`` {or_else}" + ) except KeyError: pass try: - motion_filters = partially_validated['functional_preproc'][ - 'motion_estimates_and_correction']['motion_estimate_filter'] - if True in motion_filters['run']: - for motion_filter in motion_filters['filters']: - motion_filter['Name'] = name_motion_filter( - motion_filter, motion_filters['filters']) + motion_filters = partially_validated["functional_preproc"][ + "motion_estimates_and_correction" + ]["motion_estimate_filter"] + if True in motion_filters["run"]: + for motion_filter in motion_filters["filters"]: + motion_filter["Name"] = name_motion_filter( + motion_filter, motion_filters["filters"] + ) else: - motion_filters['filters'] = [] + motion_filters["filters"] = [] except KeyError: pass try: # 'motion_correction.using' is only optional if 'run' is Off - mec = partially_validated['functional_preproc'][ - 'motion_estimates_and_correction'] - if mec['run']: + mec = partially_validated["functional_preproc"][ + "motion_estimates_and_correction" + ] + if mec["run"]: try: # max should be len(valid_options['motion_correction']) # once #1935 is resolved - Length(min=1, max=1)(mec['motion_correction']['using']) + Length(min=1, max=1)(mec["motion_correction"]["using"]) except LengthInvalid: - mec_path = ['functional_preproc', - 'motion_estimates_and_correction'] + mec_path = ["functional_preproc", "motion_estimates_and_correction"] raise LengthInvalid( # pylint: disable=raise-missing-from f'If data[{"][".join(map(repr, mec_path))}][\'run\'] is ' # length must be between 1 and # len(valid_options['motion_correction']) once #1935 is # resolved 'True, length of list must be exactly 1', - path=[*mec_path, 'motion_correction', 'using']) + path=[*mec_path, "motion_correction", "using"], + ) except KeyError: pass try: # Check for mutually exclusive options - if (partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['ingress_regressors']['run'] and - partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['create_regressors']): + if ( + partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "ingress_regressors" + ]["run"] + and partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "create_regressors" + ] + ): raise ExclusiveInvalid( "[!] Ingress_regressors and create_regressors can't both run! 
" - " Try turning one option off.\n ") + " Try turning one option off.\n " + ) except KeyError: pass try: - if not partially_validated.get("skip env check" - ) and 'unet' in [using.lower() for using in - partially_validated['anatomical_preproc'][ - 'brain_extraction']['using']]: + if not partially_validated.get("skip env check") and "unet" in [ + using.lower() + for using in partially_validated["anatomical_preproc"]["brain_extraction"][ + "using" + ] + ]: try: from importlib import import_module - import_module('CPAC.unet') - except (CalledProcessError, ImportError, ModuleNotFoundError, OSError) as error: + + import_module("CPAC.unet") + except ( + CalledProcessError, + ImportError, + ModuleNotFoundError, + OSError, + ) as error: import site + raise OSError( - 'U-Net brain extraction requires torch to be installed, ' - 'but the installation path in this container is ' - 'read-only. Please bind a local writable path to ' + "U-Net brain extraction requires torch to be installed, " + "but the installation path in this container is " + "read-only. Please bind a local writable path to " f'"{site.USER_BASE}" in the container to use U-Net.' ) from error except KeyError: diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index 3988a61f95..c228fc3640 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -1,98 +1,92 @@ import os + import pytest -from CPAC.pipeline.cpac_pipeline import initialize_nipype_wf, \ - load_cpac_pipe_config, \ - connect_pipeline, \ - build_anat_preproc_stack, \ - build_workflow -from CPAC.pipeline.engine import ResourcePool, ingress_raw_anat_data, \ - ingress_raw_func_data, \ - ingress_pipeconfig_paths, initiate_rpool + +from CPAC.pipeline.cpac_pipeline import ( + build_anat_preproc_stack, + build_workflow, + connect_pipeline, + initialize_nipype_wf, + load_cpac_pipe_config, +) +from CPAC.pipeline.engine import ( + ingress_pipeconfig_paths, + ingress_raw_anat_data, + ingress_raw_func_data, + initiate_rpool, + ResourcePool, +) from CPAC.utils.bids_utils import create_cpac_data_config -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_ingress_func_raw_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) - if 'func' in sub_data_dct: - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_raw_func_data(wf, rpool, cfg, sub_data_dct, unique_id, - part_id, ses_id) + if "func" in sub_data_dct: + wf, rpool, diff, blip, fmap_rp_list = ingress_raw_func_data( + wf, rpool, cfg, sub_data_dct, unique_id, part_id, ses_id + ) rpool.gather_pipes(wf, cfg, all=True) wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a 
pytest test") def test_ingress_anat_raw_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) - rpool = ingress_raw_anat_data(wf, rpool, cfg, - sub_data_dct, - unique_id, - part_id, ses_id) + rpool = ingress_raw_anat_data( + wf, rpool, cfg, sub_data_dct, unique_id, part_id, ses_id + ) rpool.gather_pipes(wf, cfg, all=True) wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) @@ -103,19 +97,14 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_build_anat_preproc_stack(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) @@ -127,44 +116,39 @@ def test_build_anat_preproc_stack(pipe_config, bids_dir, test_dir): rpool.gather_pipes(wf, cfg) wf.run() - -@pytest.mark.skip(reason='not a pytest test') + 
+@pytest.mark.skip(reason="not a pytest test") def test_build_workflow(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) wf, rpool = initiate_rpool(wf, cfg, sub_data_dct) - wf, _, _ = build_workflow( - sub_data_dct['subject_id'], sub_data_dct, cfg) + wf, _, _ = build_workflow(sub_data_dct["subject_id"], sub_data_dct, cfg) rpool.gather_pipes(wf, cfg) wf.run() + # bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata" # test_dir = "/test_dir" # cfg = "/Users/hecheng.jin/GitHub/DevBranch/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml" cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" -bids_dir = '/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis' +bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" # test_ingress_func_raw_data(cfg, bids_dir, test_dir) # test_ingress_anat_raw_data(cfg, bids_dir, test_dir) # test_ingress_pipeconfig_data(cfg, bids_dir, test_dir) # test_build_anat_preproc_stack(cfg, bids_dir, test_dir) -if __name__ == '__main__': +if __name__ == "__main__": test_build_workflow(cfg, bids_dir, test_dir) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 5a957bda0d..7410b335f2 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -16,48 +16,58 @@ # License along with C-PAC. If not, see . 
# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order from typing import Optional -from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock + from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks -from CPAC.registration.utils import seperate_warps_list, \ - check_transforms, \ - generate_inverse_transform_flags, \ - single_ants_xfm_to_list, \ - interpolation_string, \ - change_itk_transform_type, \ - hardcoded_reg, \ - one_d_to_mat, \ - run_c3d, \ - run_c4d +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock +from CPAC.registration.utils import ( + change_itk_transform_type, + check_transforms, + generate_inverse_transform_flags, + hardcoded_reg, + interpolation_string, + one_d_to_mat, + run_c3d, + run_c4d, + seperate_warps_list, + single_ants_xfm_to_list, +) from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool -def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, - num_cpus=1, num_ants_cores=1): - +def apply_transform( + wf_name, + reg_tool, + time_series=False, + multi_input=False, + num_cpus=1, + num_ants_cores=1, +): if not reg_tool: - raise Exception("\n[!] Developer info: the 'reg_tool' parameter sent " - f"to the 'apply_transform' node for '{wf_name}' is " - f"empty.\n") + raise Exception( + "\n[!] Developer info: the 'reg_tool' parameter sent " + f"to the 'apply_transform' node for '{wf_name}' is " + f"empty.\n" + ) wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_image', - 'reference', - 'transform', - 'interpolation']), - name='inputspec') + util.IdentityInterface( + fields=["input_image", "reference", "transform", "interpolation"] + ), + name="inputspec", + ) outputNode = pe.Node( - util.IdentityInterface(fields=['output_image']), - name='outputspec') + util.IdentityInterface(fields=["output_image"]), name="outputspec" + ) if int(num_cpus) > 1 and time_series: # parallelize time series warp application @@ -65,23 +75,22 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # time series chunks multi_input = True - if reg_tool == 'ants': - + if reg_tool == "ants": if multi_input: - apply_warp = pe.MapNode(interface=ants.ApplyTransforms(), - name=f'apply_warp_{wf_name}', - iterfield=['input_image'], - mem_gb=0.7, - mem_x=(1708448960473801 / - 151115727451828646838272, - 'input_image')) + apply_warp = pe.MapNode( + interface=ants.ApplyTransforms(), + name=f"apply_warp_{wf_name}", + iterfield=["input_image"], + mem_gb=0.7, + mem_x=(1708448960473801 / 151115727451828646838272, "input_image"), + ) else: - apply_warp = pe.Node(interface=ants.ApplyTransforms(), - name=f'apply_warp_{wf_name}', - mem_gb=0.7, - mem_x=(1708448960473801 / - 151115727451828646838272, - 'input_image')) + apply_warp = pe.Node( + interface=ants.ApplyTransforms(), + name=f"apply_warp_{wf_name}", + mem_gb=0.7, + mem_x=(1708448960473801 / 151115727451828646838272, "input_image"), + ) apply_warp.inputs.dimension = 3 apply_warp.interface.num_threads = int(num_ants_cores) @@ -89,275 +98,312 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, if time_series: 
apply_warp.inputs.input_image_type = 3 - wf.connect(inputNode, 'reference', apply_warp, 'reference_image') + wf.connect(inputNode, "reference", apply_warp, "reference_image") - interp_string = pe.Node(util.Function(input_names=['interpolation', - 'reg_tool'], - output_names=['interpolation'], - function=interpolation_string), - name=f'interp_string', - mem_gb=2.5) + interp_string = pe.Node( + util.Function( + input_names=["interpolation", "reg_tool"], + output_names=["interpolation"], + function=interpolation_string, + ), + name="interp_string", + mem_gb=2.5, + ) interp_string.inputs.reg_tool = reg_tool - wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', - apply_warp, 'interpolation') - - ants_xfm_list = \ - pe.Node(util.Function(input_names=['transform'], - output_names=['transform_list'], - function=single_ants_xfm_to_list), - name=f'single_ants_xfm_to_list', - mem_gb=2.5) + wf.connect(inputNode, "interpolation", interp_string, "interpolation") + wf.connect(interp_string, "interpolation", apply_warp, "interpolation") + + ants_xfm_list = pe.Node( + util.Function( + input_names=["transform"], + output_names=["transform_list"], + function=single_ants_xfm_to_list, + ), + name="single_ants_xfm_to_list", + mem_gb=2.5, + ) - wf.connect(inputNode, 'transform', ants_xfm_list, 'transform') - wf.connect(ants_xfm_list, 'transform_list', apply_warp, 'transforms') + wf.connect(inputNode, "transform", ants_xfm_list, "transform") + wf.connect(ants_xfm_list, "transform_list", apply_warp, "transforms") # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: + chunk_imports = ["import nibabel as nb"] + chunk = pe.Node( + util.Function( + input_names=["func_file", "n_chunks", "chunk_size"], + output_names=["TR_ranges"], + function=chunk_ts, + imports=chunk_imports, + ), + name=f"chunk_{wf_name}", + mem_gb=2.5, + ) - chunk_imports = ['import nibabel as nb'] - chunk = pe.Node(util.Function(input_names=['func_file', - 'n_chunks', - 'chunk_size'], - output_names=['TR_ranges'], - function=chunk_ts, - imports=chunk_imports), - name=f'chunk_{wf_name}', - mem_gb=2.5) - - #chunk.inputs.n_chunks = int(num_cpus) + # chunk.inputs.n_chunks = int(num_cpus) # 10-TR sized chunks chunk.inputs.chunk_size = 10 - wf.connect(inputNode, 'input_image', chunk, 'func_file') - - split_imports = ['import os', 'import subprocess'] - split = pe.Node(util.Function(input_names=['func_file', - 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), - name=f'split_{wf_name}', - mem_gb=2.5) + wf.connect(inputNode, "input_image", chunk, "func_file") + + split_imports = ["import os", "import subprocess"] + split = pe.Node( + util.Function( + input_names=["func_file", "tr_ranges"], + output_names=["split_funcs"], + function=split_ts_chunks, + imports=split_imports, + ), + name=f"split_{wf_name}", + mem_gb=2.5, + ) - wf.connect(inputNode, 'input_image', split, 'func_file') - wf.connect(chunk, 'TR_ranges', split, 'tr_ranges') + wf.connect(inputNode, "input_image", split, "func_file") + wf.connect(chunk, "TR_ranges", split, "tr_ranges") - wf.connect(split, 'split_funcs', apply_warp, 'input_image') + wf.connect(split, "split_funcs", apply_warp, "input_image") - func_concat = pe.Node(interface=afni_utils.TCat(), - name=f'func_concat_{wf_name}', - mem_gb=2.5) - func_concat.inputs.outputtype = 'NIFTI_GZ' + func_concat = pe.Node( + interface=afni_utils.TCat(), 
name=f"func_concat_{wf_name}", mem_gb=2.5 + ) + func_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(apply_warp, 'output_image', func_concat, 'in_files') + wf.connect(apply_warp, "output_image", func_concat, "in_files") - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(func_concat, "out_file", outputNode, "output_image") else: - wf.connect(inputNode, 'input_image', apply_warp, 'input_image') - wf.connect(apply_warp, 'output_image', outputNode, 'output_image') - - elif reg_tool == 'fsl': + wf.connect(inputNode, "input_image", apply_warp, "input_image") + wf.connect(apply_warp, "output_image", outputNode, "output_image") + elif reg_tool == "fsl": if multi_input: - apply_warp = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'fsl_apply_warp', - iterfield=['in_file'], - mem_gb=2.5) + apply_warp = pe.MapNode( + interface=fsl.ApplyWarp(), + name="fsl_apply_warp", + iterfield=["in_file"], + mem_gb=2.5, + ) else: - apply_warp = pe.Node(interface=fsl.ApplyWarp(), - name='fsl_apply_warp', - mem_gb=2.5) - - interp_string = pe.Node(util.Function(input_names=['interpolation', - 'reg_tool'], - output_names=['interpolation'], - function=interpolation_string), - name=f'interp_string', - mem_gb=2.5) + apply_warp = pe.Node( + interface=fsl.ApplyWarp(), name="fsl_apply_warp", mem_gb=2.5 + ) + + interp_string = pe.Node( + util.Function( + input_names=["interpolation", "reg_tool"], + output_names=["interpolation"], + function=interpolation_string, + ), + name="interp_string", + mem_gb=2.5, + ) interp_string.inputs.reg_tool = reg_tool - wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', apply_warp, 'interp') + wf.connect(inputNode, "interpolation", interp_string, "interpolation") + wf.connect(interp_string, "interpolation", apply_warp, "interp") # mni to t1 - wf.connect(inputNode, 'reference', apply_warp, 'ref_file') + wf.connect(inputNode, "reference", apply_warp, "ref_file") # NOTE: C-PAC now converts all FSL xfm's to .nii, so even if the # inputNode 'transform' is a linear xfm, it's a .nii and must # go in as a warpfield file - wf.connect(inputNode, 'transform', apply_warp, 'field_file') + wf.connect(inputNode, "transform", apply_warp, "field_file") # parallelize the apply warp, if multiple CPUs, and it's a time # series! 
if int(num_cpus) > 1 and time_series: + chunk_imports = ["import nibabel as nb"] + chunk = pe.Node( + util.Function( + input_names=["func_file", "n_chunks", "chunk_size"], + output_names=["TR_ranges"], + function=chunk_ts, + imports=chunk_imports, + ), + name=f"chunk_{wf_name}", + mem_gb=2.5, + ) - chunk_imports = ['import nibabel as nb'] - chunk = pe.Node(util.Function(input_names=['func_file', - 'n_chunks', - 'chunk_size'], - output_names=['TR_ranges'], - function=chunk_ts, - imports=chunk_imports), - name=f'chunk_{wf_name}', - mem_gb=2.5) - - #chunk.inputs.n_chunks = int(num_cpus) + # chunk.inputs.n_chunks = int(num_cpus) # 10-TR sized chunks chunk.inputs.chunk_size = 10 - wf.connect(inputNode, 'input_image', chunk, 'func_file') - - split_imports = ['import os', 'import subprocess'] - split = pe.Node(util.Function(input_names=['func_file', - 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), - name=f'split_{wf_name}', - mem_gb=2.5) + wf.connect(inputNode, "input_image", chunk, "func_file") + + split_imports = ["import os", "import subprocess"] + split = pe.Node( + util.Function( + input_names=["func_file", "tr_ranges"], + output_names=["split_funcs"], + function=split_ts_chunks, + imports=split_imports, + ), + name=f"split_{wf_name}", + mem_gb=2.5, + ) - wf.connect(inputNode, 'input_image', split, 'func_file') - wf.connect(chunk, 'TR_ranges', split, 'tr_ranges') + wf.connect(inputNode, "input_image", split, "func_file") + wf.connect(chunk, "TR_ranges", split, "tr_ranges") - wf.connect(split, 'split_funcs', apply_warp, 'in_file') + wf.connect(split, "split_funcs", apply_warp, "in_file") - func_concat = pe.Node(interface=afni_utils.TCat(), - name=f'func_concat{wf_name}') - func_concat.inputs.outputtype = 'NIFTI_GZ' + func_concat = pe.Node( + interface=afni_utils.TCat(), name=f"func_concat{wf_name}" + ) + func_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(apply_warp, 'out_file', func_concat, 'in_files') + wf.connect(apply_warp, "out_file", func_concat, "in_files") - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(func_concat, "out_file", outputNode, "output_image") else: - wf.connect(inputNode, 'input_image', apply_warp, 'in_file') - wf.connect(apply_warp, 'out_file', outputNode, 'output_image') + wf.connect(inputNode, "input_image", apply_warp, "in_file") + wf.connect(apply_warp, "out_file", outputNode, "output_image") return wf -def transform_derivative(wf_name, label, reg_tool, num_cpus, num_ants_cores, - ants_interp=None, fsl_interp=None, opt=None): - '''Transform output derivatives to template space. +def transform_derivative( + wf_name, + label, + reg_tool, + num_cpus, + num_ants_cores, + ants_interp=None, + fsl_interp=None, + opt=None, +): + """Transform output derivatives to template space. This function is designed for use with the NodeBlock connection engine. 
- ''' - + """ wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['in_file', - 'reference', - 'transform']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["in_file", "reference", "transform"]), + name="inputspec", + ) multi_input = False - if 'statmap' in label: + if "statmap" in label: multi_input = True stack = False - if 'correlations' in label: + if "correlations" in label: stack = True - apply_xfm = apply_transform(f'warp_{label}_to_template', reg_tool, - time_series=stack, - multi_input=multi_input, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_{label}_to_template", + reg_tool, + time_series=stack, + multi_input=multi_input, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = ants_interp - elif reg_tool == 'fsl': + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = fsl_interp - wf.connect(inputnode, 'in_file', apply_xfm, 'inputspec.input_image') - wf.connect(inputnode, 'reference', apply_xfm, 'inputspec.reference') - wf.connect(inputnode, 'transform', apply_xfm, 'inputspec.transform') + wf.connect(inputnode, "in_file", apply_xfm, "inputspec.input_image") + wf.connect(inputnode, "reference", apply_xfm, "inputspec.reference") + wf.connect(inputnode, "transform", apply_xfm, "inputspec.transform") - outputnode = pe.Node(util.IdentityInterface(fields=['out_file']), - name='outputspec') + outputnode = pe.Node(util.IdentityInterface(fields=["out_file"]), name="outputspec") - wf.connect(apply_xfm, 'outputspec.output_image', outputnode, 'out_file') + wf.connect(apply_xfm, "outputspec.output_image", outputnode, "out_file") return wf -def convert_pedir(pedir, convert='xyz_to_int'): - '''FSL Flirt requires pedir input encoded as an int''' - if convert == 'xyz_to_int': - conv_dct = {'x': 1, 'y': 2, 'z': 3, 'x-': -1, 'y-': -2, 'z-': -3, - 'i': 1, 'j': 2, 'k': 3, 'i-': -1, 'j-': -2, 'k-': -3, - '-x': -1, '-i': -1, '-y': -2, - '-j': -2, '-z': -3, '-k': -3} - elif convert == 'ijk_to_xyz': - conv_dct = {'i': 'x', 'j': 'y', 'k': 'z', - 'i-': 'x-', 'j-': 'y-', 'k-': 'z-'} +def convert_pedir(pedir, convert="xyz_to_int"): + """FSL Flirt requires pedir input encoded as an int""" + if convert == "xyz_to_int": + conv_dct = { + "x": 1, + "y": 2, + "z": 3, + "x-": -1, + "y-": -2, + "z-": -3, + "i": 1, + "j": 2, + "k": 3, + "i-": -1, + "j-": -2, + "k-": -3, + "-x": -1, + "-i": -1, + "-y": -2, + "-j": -2, + "-z": -3, + "-k": -3, + } + elif convert == "ijk_to_xyz": + conv_dct = {"i": "x", "j": "y", "k": "z", "i-": "x-", "j-": "y-", "k-": "z-"} if isinstance(pedir, bytes): pedir = pedir.decode() if not isinstance(pedir, str): - raise Exception("\n\nPhase-encoding direction must be a " - "string value.\n\nValue: {0}" - "\n\n".format(pedir)) + raise Exception( + "\n\nPhase-encoding direction must be a " + f"string value.\n\nValue: {pedir}" + "\n\n" + ) if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " - "entered: {0}\n\n".format(pedir)) + raise Exception("\n\nInvalid phase-encoding direction " f"entered: {pedir}\n\n") pedir = conv_dct[pedir] return pedir -def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): - +def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): linear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'interp', - 'ref_mask']), - name='inputspec') 
+ inputspec = pe.Node( + util.IdentityInterface( + fields=["input_brain", "reference_brain", "interp", "ref_mask"] + ), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'linear_xfm', - 'invlinear_xfm']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["output_brain", "linear_xfm", "invlinear_xfm"]), + name="outputspec", + ) - linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') - linear_reg.inputs.cost = 'corratio' + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg_0") + linear_reg.inputs.cost = "corratio" - inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), - name='inv_linear_reg0_xfm') + inv_flirt_xfm = pe.Node( + interface=fsl.utils.ConvertXFM(), name="inv_linear_reg0_xfm" + ) inv_flirt_xfm.inputs.invert_xfm = True - linear_register.connect(inputspec, 'input_brain', - linear_reg, 'in_file') + linear_register.connect(inputspec, "input_brain", linear_reg, "in_file") - linear_register.connect(inputspec, 'reference_brain', - linear_reg, 'reference') + linear_register.connect(inputspec, "reference_brain", linear_reg, "reference") - linear_register.connect(inputspec, 'interp', - linear_reg, 'interp') + linear_register.connect(inputspec, "interp", linear_reg, "interp") - linear_register.connect(linear_reg, 'out_file', - outputspec, 'output_brain') + linear_register.connect(linear_reg, "out_file", outputspec, "output_brain") - linear_register.connect(linear_reg, 'out_matrix_file', - inv_flirt_xfm, 'in_file') + linear_register.connect(linear_reg, "out_matrix_file", inv_flirt_xfm, "in_file") - linear_register.connect(inv_flirt_xfm, 'out_file', - outputspec, 'invlinear_xfm') + linear_register.connect(inv_flirt_xfm, "out_file", outputspec, "invlinear_xfm") - linear_register.connect(linear_reg, 'out_matrix_file', - outputspec, 'linear_xfm') + linear_register.connect(linear_reg, "out_matrix_file", outputspec, "linear_xfm") return linear_register -def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): +def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): """ Performs non-linear registration of an input file to a reference file using FSL FNIRT. @@ -373,7 +419,6 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): Notes ----- - Workflow Inputs:: inputspec.input_skull : string (nifti file) @@ -408,71 +453,68 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): .. 
image:: ../images/nonlinear_register_detailed.dot.png :width: 500 """ - nonlinear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'input_skull', - 'reference_brain', - 'reference_skull', - 'interp', - 'ref_mask', - 'linear_aff', - 'fnirt_config']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "input_brain", + "input_skull", + "reference_brain", + "reference_skull", + "interp", + "ref_mask", + "linear_aff", + "fnirt_config", + ] + ), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'nonlinear_xfm']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["output_brain", "nonlinear_xfm"]), + name="outputspec", + ) - nonlinear_reg = pe.Node(interface=fsl.FNIRT(), - name='nonlinear_reg_1') + nonlinear_reg = pe.Node(interface=fsl.FNIRT(), name="nonlinear_reg_1") nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name="brain_warp") - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') + nonlinear_register.connect(inputspec, "input_skull", nonlinear_reg, "in_file") - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", nonlinear_reg, "ref_file") - nonlinear_register.connect(inputspec, 'interp', - brain_warp, 'interp') + nonlinear_register.connect(inputspec, "interp", brain_warp, "interp") - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') + nonlinear_register.connect(inputspec, "ref_mask", nonlinear_reg, "refmask_file") # FNIRT parameters are specified by FSL config file # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified) - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') + nonlinear_register.connect(inputspec, "fnirt_config", nonlinear_reg, "config_file") - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') + nonlinear_register.connect(inputspec, "linear_aff", nonlinear_reg, "affine_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", outputspec, "nonlinear_xfm" + ) - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", brain_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - brain_warp, 'field_file') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", brain_warp, "field_file" + ) - nonlinear_register.connect(inputspec, 'reference_brain', - brain_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_brain", brain_warp, "ref_file") - nonlinear_register.connect(brain_warp, 'out_file', - outputspec, 'output_brain') + nonlinear_register.connect(brain_warp, "out_file", outputspec, "output_brain") return nonlinear_register -def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): +def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): """ Performs non-linear registration of an input file to a reference file using FSL FNIRT. 
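For orientation between hunks: a minimal sketch of driving the
create_fsl_fnirt_nonlinear_reg workflow reformatted above on its own. All
file paths here are hypothetical placeholders, not values from this patch;
the inputspec/outputspec field names come from the workflow definition
itself.

    from CPAC.registration.registration import create_fsl_fnirt_nonlinear_reg

    nlin = create_fsl_fnirt_nonlinear_reg(name="nonlinear_register")
    # nipype lets a workflow's inner-node inputs be set via wf.inputs.<node>
    nlin.inputs.inputspec.input_brain = "sub-01_T1w_brain.nii.gz"        # placeholder
    nlin.inputs.inputspec.input_skull = "sub-01_T1w.nii.gz"              # placeholder
    nlin.inputs.inputspec.reference_brain = "MNI152_T1_2mm_brain.nii.gz" # placeholder
    nlin.inputs.inputspec.reference_skull = "MNI152_T1_2mm.nii.gz"       # placeholder
    nlin.inputs.inputspec.ref_mask = "MNI152_T1_2mm_brain_mask.nii.gz"   # placeholder
    nlin.inputs.inputspec.linear_aff = "anat_to_template_linear.mat"     # FLIRT affine
    nlin.inputs.inputspec.fnirt_config = "T1_2_MNI152_2mm"               # FSL config
    nlin.inputs.inputspec.interp = "spline"
    nlin.run()  # yields outputspec.output_brain and outputspec.nonlinear_xfm

In C-PAC itself the linear_aff input is fed from the FLIRT workflow's
outputspec.linear_xfm rather than set by hand, which is the usual FSL
two-stage (FLIRT then FNIRT) arrangement.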
@@ -488,7 +530,6 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): Notes ----- - Workflow Inputs:: inputspec.input_skull : string (nifti file) @@ -525,129 +566,120 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): .. image:: ../images/nonlinear_register_detailed.dot.png :width: 500 """ - nonlinear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'input_skull', - 'reference_brain', - 'reference_skull', - 'interp', - 'ref_mask', - 'linear_aff', - 'fnirt_config']), - name='inputspec') - - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'output_head', - 'output_mask', - 'output_biasfield', - 'nonlinear_xfm', - 'nonlinear_warp']), - name='outputspec') - - nonlinear_reg = pe.Node(interface=fsl.FNIRT(), - name='nonlinear_reg_1') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "input_brain", + "input_skull", + "reference_brain", + "reference_skull", + "interp", + "ref_mask", + "linear_aff", + "fnirt_config", + ] + ), + name="inputspec", + ) + + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "output_brain", + "output_head", + "output_mask", + "output_biasfield", + "nonlinear_xfm", + "nonlinear_warp", + ] + ), + name="outputspec", + ) + + nonlinear_reg = pe.Node(interface=fsl.FNIRT(), name="nonlinear_reg_1") nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') + nonlinear_register.connect(inputspec, "input_skull", nonlinear_reg, "in_file") - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", nonlinear_reg, "ref_file") - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') + nonlinear_register.connect(inputspec, "ref_mask", nonlinear_reg, "refmask_file") - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') + nonlinear_register.connect(inputspec, "fnirt_config", nonlinear_reg, "config_file") - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') + nonlinear_register.connect(inputspec, "linear_aff", nonlinear_reg, "affine_file") - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') - brain_warp.inputs.interp = 'nn' + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name="brain_warp") + brain_warp.inputs.interp = "nn" brain_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", brain_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - brain_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", brain_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - brain_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", brain_warp, "ref_file") - head_warp = pe.Node(interface=fsl.ApplyWarp(), - name='head_warp') - head_warp.inputs.interp = 'spline' + head_warp = pe.Node(interface=fsl.ApplyWarp(), name="head_warp") + head_warp.inputs.interp = "spline" head_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - head_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", head_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - 
head_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", head_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - head_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", head_warp, "ref_file") - mask_warp = pe.Node(interface=fsl.ApplyWarp(), - name='mask_warp') - mask_warp.inputs.interp = 'nn' + mask_warp = pe.Node(interface=fsl.ApplyWarp(), name="mask_warp") + mask_warp.inputs.interp = "nn" mask_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - mask_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", mask_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - mask_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", mask_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - mask_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", mask_warp, "ref_file") - biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), - name='biasfield_warp') - biasfield_warp.inputs.interp = 'spline' + biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), name="biasfield_warp") + biasfield_warp.inputs.interp = "spline" biasfield_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - biasfield_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", biasfield_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - biasfield_warp, 'field_file') + nonlinear_register.connect( + nonlinear_reg, "field_file", biasfield_warp, "field_file" + ) - nonlinear_register.connect(inputspec, 'reference_skull', - biasfield_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", biasfield_warp, "ref_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", outputspec, "nonlinear_xfm" + ) - nonlinear_register.connect(nonlinear_reg, 'field_file', - outputspec, 'nonlinear_warp') + nonlinear_register.connect( + nonlinear_reg, "field_file", outputspec, "nonlinear_warp" + ) - nonlinear_register.connect(brain_warp, 'out_file', - outputspec, 'output_brain') + nonlinear_register.connect(brain_warp, "out_file", outputspec, "output_brain") - nonlinear_register.connect(head_warp, 'out_file', - outputspec, 'output_head') + nonlinear_register.connect(head_warp, "out_file", outputspec, "output_head") - nonlinear_register.connect(mask_warp, 'out_file', - outputspec, 'output_mask') + nonlinear_register.connect(mask_warp, "out_file", outputspec, "output_mask") - nonlinear_register.connect(biasfield_warp, 'out_file', - outputspec, 'output_biasfield') + nonlinear_register.connect( + biasfield_warp, "out_file", outputspec, "output_biasfield" + ) return nonlinear_register -def create_register_func_to_anat(config, phase_diff_distcor=False, - name='register_func_to_anat'): - +def create_register_func_to_anat( + config, phase_diff_distcor=False, name="register_func_to_anat" +): """ Registers a functional scan in native space to anatomical space using a linear transform and does not include bbregister. 
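# [Reviewer note, not part of the patch] Standalone illustration of the
# interpolation split in the NHP workflow above: mask_warp resamples with
# nearest-neighbour so binary labels stay binary, while head_warp and
# biasfield_warp use spline for smooth, continuous intensities. File names
# here are hypothetical.
from nipype.interfaces import fsl

mask_warp = fsl.ApplyWarp(
    in_file="brain_mask.nii.gz",        # label image
    ref_file="reference_skull.nii.gz",
    field_file="warp_field.nii.gz",
    interp="nn",                        # preserves 0/1 values
    relwarp=True,                       # field holds relative displacements
)
head_warp = fsl.ApplyWarp(
    in_file="head.nii.gz",              # continuous intensities
    ref_file="reference_skull.nii.gz",
    field_file="warp_field.nii.gz",
    interp="spline",                    # smooth resampling
    relwarp=True,
)
# mask_warp.run(); head_warp.run()      # requires FSL on $PATH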
@@ -668,7 +700,6 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -688,77 +719,93 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, """ register_func_to_anat = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'anat', - 'dof', - 'interp', - 'fieldmap', - 'fieldmapmask']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=["func", "anat", "dof", "interp", "fieldmap", "fieldmapmask"] + ), + name="inputspec", + ) inputNode_echospacing = pe.Node( - util.IdentityInterface(fields=['echospacing']), - name='echospacing_input') - - inputNode_pedir = pe.Node(util.IdentityInterface(fields=['pedir']), - name='pedir_input') + util.IdentityInterface(fields=["echospacing"]), name="echospacing_input" + ) - outputspec = pe.Node(util.IdentityInterface( - fields=['func_to_anat_linear_xfm_nobbreg', 'anat_func_nobbreg']), - name='outputspec') + inputNode_pedir = pe.Node( + util.IdentityInterface(fields=["pedir"]), name="pedir_input" + ) - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_func_to_anat') + outputspec = pe.Node( + util.IdentityInterface( + fields=["func_to_anat_linear_xfm_nobbreg", "anat_func_nobbreg"] + ), + name="outputspec", + ) - linear_reg.inputs.interp = config.registration_workflows['functional_registration']['coregistration']['interpolation'] - linear_reg.inputs.cost = config.registration_workflows['functional_registration']['coregistration']['cost'] - linear_reg.inputs.dof = config.registration_workflows['functional_registration']['coregistration']['dof'] - if config.registration_workflows['functional_registration']['coregistration']['arguments'] is not None: - linear_reg.inputs.args = config.registration_workflows['functional_registration']['coregistration']['arguments'] + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_func_to_anat") + + linear_reg.inputs.interp = config.registration_workflows["functional_registration"][ + "coregistration" + ]["interpolation"] + linear_reg.inputs.cost = config.registration_workflows["functional_registration"][ + "coregistration" + ]["cost"] + linear_reg.inputs.dof = config.registration_workflows["functional_registration"][ + "coregistration" + ]["dof"] + if ( + config.registration_workflows["functional_registration"]["coregistration"][ + "arguments" + ] + is not None + ): + linear_reg.inputs.args = config.registration_workflows[ + "functional_registration" + ]["coregistration"]["arguments"] if phase_diff_distcor: - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name='coreg_convert_pedir') - conv_pedir.inputs.convert = 'xyz_to_int' + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name="coreg_convert_pedir", + ) + conv_pedir.inputs.convert = "xyz_to_int" - register_func_to_anat.connect(inputNode_pedir, 'pedir', - conv_pedir, 'pedir') - register_func_to_anat.connect(conv_pedir, 'pedir', - linear_reg, 'pedir') - register_func_to_anat.connect(inputspec, 'fieldmap', - linear_reg, 'fieldmap') - register_func_to_anat.connect(inputspec, 'fieldmapmask', - linear_reg, 'fieldmapmask') - register_func_to_anat.connect(inputNode_echospacing, 'echospacing', - linear_reg, 'echospacing') + register_func_to_anat.connect(inputNode_pedir, "pedir", conv_pedir, "pedir") + 
register_func_to_anat.connect(conv_pedir, "pedir", linear_reg, "pedir") + register_func_to_anat.connect(inputspec, "fieldmap", linear_reg, "fieldmap") + register_func_to_anat.connect( + inputspec, "fieldmapmask", linear_reg, "fieldmapmask" + ) + register_func_to_anat.connect( + inputNode_echospacing, "echospacing", linear_reg, "echospacing" + ) - register_func_to_anat.connect(inputspec, 'func', linear_reg, 'in_file') + register_func_to_anat.connect(inputspec, "func", linear_reg, "in_file") - register_func_to_anat.connect(inputspec, 'anat', linear_reg, 'reference') + register_func_to_anat.connect(inputspec, "anat", linear_reg, "reference") - register_func_to_anat.connect(inputspec, 'dof', linear_reg, 'dof') + register_func_to_anat.connect(inputspec, "dof", linear_reg, "dof") - register_func_to_anat.connect(inputspec, 'interp', linear_reg, 'interp') + register_func_to_anat.connect(inputspec, "interp", linear_reg, "interp") - register_func_to_anat.connect(linear_reg, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') + register_func_to_anat.connect( + linear_reg, "out_matrix_file", outputspec, "func_to_anat_linear_xfm_nobbreg" + ) - register_func_to_anat.connect(linear_reg, 'out_file', - outputspec, 'anat_func_nobbreg') + register_func_to_anat.connect( + linear_reg, "out_file", outputspec, "anat_func_nobbreg" + ) return register_func_to_anat -def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_T2'): +def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_T2"): # for monkey data # ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 # https://github.com/HechengJin0/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 - """ Registers a functional scan in native space to anatomical space using a linear transform and does not include bbregister, use T1 and T2 image. 
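# [Reviewer note, not part of the patch] create_register_func_to_anat above
# pulls its FLIRT settings from the nested pipeline-config keys
# registration_workflows -> functional_registration -> coregistration.
# Minimal sketch of that mapping; the literal values are placeholders, not
# C-PAC defaults.
from nipype.interfaces import fsl

coregistration = {
    "interpolation": "trilinear",
    "cost": "corratio",
    "dof": 6,
    "arguments": None,  # optional extra FLIRT command-line arguments
}

linear_reg = fsl.FLIRT()
linear_reg.inputs.interp = coregistration["interpolation"]
linear_reg.inputs.cost = coregistration["cost"]
linear_reg.inputs.dof = coregistration["dof"]
if coregistration["arguments"] is not None:
    linear_reg.inputs.args = coregistration["arguments"]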
@@ -776,7 +823,6 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -791,109 +837,130 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ outputspec.anat_func_nobbreg : string (nifti file) Functional scan registered to anatomical space """ - - register_func_to_anat_use_T2 = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'T1_brain', - 'T2_head', - 'T2_brain']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface(fields=["func", "T1_brain", "T2_head", "T2_brain"]), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_xfm_nobbreg', - 'func_to_anat_linear_warp_nobbreg', - 'anat_func_nobbreg']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "func_to_anat_linear_xfm_nobbreg", + "func_to_anat_linear_warp_nobbreg", + "anat_func_nobbreg", + ] + ), + name="outputspec", + ) # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/${ScoutName}_gdc -ref ${T1wFolder}/${T2wRestoreImage} -omat "$fMRIFolder"/Scout2T2w.mat -out ${fMRIFolder}/Scout2T2w.nii.gz -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo - linear_reg_func_to_t2 = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_func_to_t2') - linear_reg_func_to_t2.inputs.interp = 'spline' - linear_reg_func_to_t2.inputs.cost = 'mutualinfo' + linear_reg_func_to_t2 = pe.Node(interface=fsl.FLIRT(), name="linear_reg_func_to_t2") + linear_reg_func_to_t2.inputs.interp = "spline" + linear_reg_func_to_t2.inputs.cost = "mutualinfo" linear_reg_func_to_t2.inputs.dof = 6 linear_reg_func_to_t2.inputs.searchr_x = [30, 30] linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - register_func_to_anat_use_T2.connect(inputspec, 'func', linear_reg_func_to_t2, 'in_file') + register_func_to_anat_use_T2.connect( + inputspec, "func", linear_reg_func_to_t2, "in_file" + ) - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', linear_reg_func_to_t2, 'reference') + register_func_to_anat_use_T2.connect( + inputspec, "T2_head", linear_reg_func_to_t2, "reference" + ) # ${FSLDIR}/bin/convert_xfm -omat "$fMRIFolder"/T2w2Scout.mat -inverse "$fMRIFolder"/Scout2T2w.mat - invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') + invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t2, 'out_matrix_file', invt, 'in_file') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t2, "out_matrix_file", invt, "in_file" + ) # ${FSLDIR}/bin/applywarp --interp=nn -i ${T1wFolder}/${T2wRestoreImageBrain} -r ${fMRIFolder}/${ScoutName}_gdc --premat="$fMRIFolder"/T2w2Scout.mat -o ${fMRIFolder}/Scout_brain_mask.nii.gz - anat_to_func = pe.Node(interface=fsl.ApplyWarp(), - name='anat_to_func') - anat_to_func.inputs.interp = 'nn' + anat_to_func = pe.Node(interface=fsl.ApplyWarp(), name="anat_to_func") + anat_to_func.inputs.interp = "nn" - register_func_to_anat_use_T2.connect(inputspec, 'T2_brain', anat_to_func, 'in_file') - register_func_to_anat_use_T2.connect(inputspec, 'func', anat_to_func, 'ref_file') - register_func_to_anat_use_T2.connect(invt, 'out_file', anat_to_func, 'premat') + register_func_to_anat_use_T2.connect(inputspec, "T2_brain", anat_to_func, "in_file") + register_func_to_anat_use_T2.connect(inputspec, "func", anat_to_func, "ref_file") + 
register_func_to_anat_use_T2.connect(invt, "out_file", anat_to_func, "premat") # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/Scout_brain_mask.nii.gz -bin ${fMRIFolder}/Scout_brain_mask.nii.gz - func_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'func_brain_mask') - func_brain_mask.inputs.args = '-bin' + func_brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name="func_brain_mask" + ) + func_brain_mask.inputs.args = "-bin" - register_func_to_anat_use_T2.connect(anat_to_func, 'out_file', func_brain_mask, 'in_file') + register_func_to_anat_use_T2.connect( + anat_to_func, "out_file", func_brain_mask, "in_file" + ) # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/${ScoutName}_gdc -mas ${fMRIFolder}/Scout_brain_mask.nii.gz ${fMRIFolder}/Scout_brain_dc.nii.gz - func_brain = pe.Node(interface=fsl.MultiImageMaths(), - name='func_brain') + func_brain = pe.Node(interface=fsl.MultiImageMaths(), name="func_brain") func_brain.inputs.op_string = "-mas %s " - register_func_to_anat_use_T2.connect(inputspec, 'func', func_brain, 'in_file') - register_func_to_anat_use_T2.connect(func_brain_mask, 'out_file', func_brain, 'operand_files') + register_func_to_anat_use_T2.connect(inputspec, "func", func_brain, "in_file") + register_func_to_anat_use_T2.connect( + func_brain_mask, "out_file", func_brain, "operand_files" + ) # ## re-registering the maked brain to the T1 brain: # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/Scout_brain_dc.nii.gz -ref ${T1wFolder}/${T1wRestoreImageBrain} -omat "$fMRIFolder"/${ScoutName}_gdc2T1w_init.mat -out ${fMRIFolder}/${ScoutName}_gdc2T1w_brain_init -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo - linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_func_to_t1') - linear_reg_func_to_t1.inputs.interp = 'spline' - linear_reg_func_to_t1.inputs.cost = 'mutualinfo' + linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(), name="linear_reg_func_to_t1") + linear_reg_func_to_t1.inputs.interp = "spline" + linear_reg_func_to_t1.inputs.cost = "mutualinfo" linear_reg_func_to_t1.inputs.dof = 6 linear_reg_func_to_t1.inputs.searchr_x = [30, 30] linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] - register_func_to_anat_use_T2.connect(func_brain, 'out_file', linear_reg_func_to_t1, 'in_file') + register_func_to_anat_use_T2.connect( + func_brain, "out_file", linear_reg_func_to_t1, "in_file" + ) - register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', linear_reg_func_to_t1, 'reference') + register_func_to_anat_use_T2.connect( + inputspec, "T1_brain", linear_reg_func_to_t1, "reference" + ) # #taking out warpfield as it is not being made without a fieldmap. 
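    # [Reviewer note, not part of the patch] convertwarp is invoked below with
    # only --postmat, so the "warp" it writes is just this affine expressed as
    # a relative warp field; downstream consumers then see a single warp-field
    # interface whether or not a fieldmap-derived distortion warp exists.
    # Standalone Nipype equivalent, with hypothetical file names:
    #     cw = fsl.ConvertWarp(reference="T2w_restore.nii.gz",
    #                          postmat="func2T1w_init.mat",
    #                          relwarp=True, out_relwarp=True)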
# ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp - convert_warp = pe.Node(interface=fsl.ConvertWarp(), name='convert_warp') + convert_warp = pe.Node(interface=fsl.ConvertWarp(), name="convert_warp") convert_warp.inputs.out_relwarp = True convert_warp.inputs.relwarp = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') - - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', convert_warp, 'reference') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, "out_matrix_file", convert_warp, "postmat" + ) + register_func_to_anat_use_T2.connect( + inputspec, "T2_head", convert_warp, "reference" + ) - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, + "out_matrix_file", + outputspec, + "func_to_anat_linear_xfm_nobbreg", + ) - register_func_to_anat_use_T2.connect(convert_warp, 'out_file', - outputspec, - 'func_to_anat_linear_warp_nobbreg') + register_func_to_anat_use_T2.connect( + convert_warp, "out_file", outputspec, "func_to_anat_linear_warp_nobbreg" + ) - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_file', - outputspec, 'anat_func_nobbreg') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, "out_file", outputspec, "anat_func_nobbreg" + ) return register_func_to_anat_use_T2 -def create_bbregister_func_to_anat(phase_diff_distcor=False, - name='bbregister_func_to_anat'): - +def create_bbregister_func_to_anat( + phase_diff_distcor=False, name="bbregister_func_to_anat" +): """ Registers a functional scan in native space to structural. 
This is meant to be used after create_nonlinear_register() has been run and @@ -913,7 +980,6 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -934,105 +1000,115 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, outputspec.anat_func : string (nifti file) Functional data in anatomical space """ - register_bbregister_func_to_anat = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'anat', - 'linear_reg_matrix', - 'anat_wm_segmentation', - 'bbr_schedule', - 'bbr_wm_mask_args', - 'fieldmap', - 'fieldmapmask']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "func", + "anat", + "linear_reg_matrix", + "anat_wm_segmentation", + "bbr_schedule", + "bbr_wm_mask_args", + "fieldmap", + "fieldmapmask", + ] + ), + name="inputspec", + ) inputNode_echospacing = pe.Node( - util.IdentityInterface(fields=['echospacing']), - name='echospacing_input') + util.IdentityInterface(fields=["echospacing"]), name="echospacing_input" + ) - inputNode_pedir = pe.Node(util.IdentityInterface(fields=['pedir']), - name='pedir_input') + inputNode_pedir = pe.Node( + util.IdentityInterface(fields=["pedir"]), name="pedir_input" + ) - outputspec = pe.Node(util.IdentityInterface( - fields=['func_to_anat_linear_xfm', 'anat_func']), name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["func_to_anat_linear_xfm", "anat_func"]), + name="outputspec", + ) - wm_bb_mask = pe.Node(interface=fsl.ImageMaths(), - name='wm_bb_mask') + wm_bb_mask = pe.Node(interface=fsl.ImageMaths(), name="wm_bb_mask") register_bbregister_func_to_anat.connect( - inputspec, 'bbr_wm_mask_args', - wm_bb_mask, 'op_string') + inputspec, "bbr_wm_mask_args", wm_bb_mask, "op_string" + ) - register_bbregister_func_to_anat.connect(inputspec, - 'anat_wm_segmentation', - wm_bb_mask, 'in_file') + register_bbregister_func_to_anat.connect( + inputspec, "anat_wm_segmentation", wm_bb_mask, "in_file" + ) def bbreg_args(bbreg_target): - return '-cost bbr -wmseg ' + bbreg_target + return "-cost bbr -wmseg " + bbreg_target - bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='bbreg_func_to_anat') + bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name="bbreg_func_to_anat") bbreg_func_to_anat.inputs.dof = 6 register_bbregister_func_to_anat.connect( - inputspec, 'bbr_schedule', - bbreg_func_to_anat, 'schedule') + inputspec, "bbr_schedule", bbreg_func_to_anat, "schedule" + ) register_bbregister_func_to_anat.connect( - wm_bb_mask, ('out_file', bbreg_args), - bbreg_func_to_anat, 'args') + wm_bb_mask, ("out_file", bbreg_args), bbreg_func_to_anat, "args" + ) register_bbregister_func_to_anat.connect( - inputspec, 'func', - bbreg_func_to_anat, 'in_file') + inputspec, "func", bbreg_func_to_anat, "in_file" + ) register_bbregister_func_to_anat.connect( - inputspec, 'anat', - bbreg_func_to_anat, 'reference') + inputspec, "anat", bbreg_func_to_anat, "reference" + ) register_bbregister_func_to_anat.connect( - inputspec, 'linear_reg_matrix', - bbreg_func_to_anat, 'in_matrix_file') + inputspec, "linear_reg_matrix", bbreg_func_to_anat, "in_matrix_file" + ) if phase_diff_distcor: - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name='bbreg_convert_pedir') - conv_pedir.inputs.convert = 'xyz_to_int' - - register_bbregister_func_to_anat.connect(inputNode_pedir, 'pedir', - conv_pedir, 'pedir') - 
register_bbregister_func_to_anat.connect(conv_pedir, 'pedir', - bbreg_func_to_anat, 'pedir') + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name="bbreg_convert_pedir", + ) + conv_pedir.inputs.convert = "xyz_to_int" + + register_bbregister_func_to_anat.connect( + inputNode_pedir, "pedir", conv_pedir, "pedir" + ) register_bbregister_func_to_anat.connect( - inputspec, 'fieldmap', - bbreg_func_to_anat, 'fieldmap') + conv_pedir, "pedir", bbreg_func_to_anat, "pedir" + ) register_bbregister_func_to_anat.connect( - inputspec, 'fieldmapmask', - bbreg_func_to_anat, 'fieldmapmask') + inputspec, "fieldmap", bbreg_func_to_anat, "fieldmap" + ) register_bbregister_func_to_anat.connect( - inputNode_echospacing, 'echospacing', - bbreg_func_to_anat, 'echospacing') + inputspec, "fieldmapmask", bbreg_func_to_anat, "fieldmapmask" + ) + register_bbregister_func_to_anat.connect( + inputNode_echospacing, "echospacing", bbreg_func_to_anat, "echospacing" + ) register_bbregister_func_to_anat.connect( - bbreg_func_to_anat, 'out_matrix_file', - outputspec, 'func_to_anat_linear_xfm') + bbreg_func_to_anat, "out_matrix_file", outputspec, "func_to_anat_linear_xfm" + ) register_bbregister_func_to_anat.connect( - bbreg_func_to_anat, 'out_file', - outputspec, 'anat_func') + bbreg_func_to_anat, "out_file", outputspec, "anat_func" + ) return register_bbregister_func_to_anat def create_wf_calculate_ants_warp( - name='create_wf_calculate_ants_warp', num_threads=1, reg_ants_skull=1 + name="create_wf_calculate_ants_warp", num_threads=1, reg_ants_skull=1 ): - ''' + """ Calculates the nonlinear ANTS registration transform. This workflow employs the antsRegistration tool: @@ -1050,7 +1126,6 @@ def create_wf_calculate_ants_warp( Notes ----- - Some of the inputs listed below are lists or lists of lists. This is because antsRegistration can perform multiple stages of calculations depending on how the user configures their registration. @@ -1157,798 +1232,986 @@ def create_wf_calculate_ants_warp( .. 
image:: :width: 500 - ''' - + """ calc_ants_warp_wf = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface( - fields=['moving_brain', - 'reference_brain', - 'moving_skull', - 'reference_skull', - 'reference_mask', - 'moving_mask', - 'fixed_image_mask', - 'ants_para', - 'interp']), - name='inputspec') - - outputspec = pe.Node(util.IdentityInterface( - fields=['ants_initial_xfm', - 'ants_rigid_xfm', - 'ants_affine_xfm', - 'warp_field', - 'inverse_warp_field', - 'composite_transform', - 'wait', - 'normalized_output_brain']), name='outputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "moving_brain", + "reference_brain", + "moving_skull", + "reference_skull", + "reference_mask", + "moving_mask", + "fixed_image_mask", + "ants_para", + "interp", + ] + ), + name="inputspec", + ) + + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "ants_initial_xfm", + "ants_rigid_xfm", + "ants_affine_xfm", + "warp_field", + "inverse_warp_field", + "composite_transform", + "wait", + "normalized_output_brain", + ] + ), + name="outputspec", + ) # use ANTS to warp the masked anatomical image to a template image - ''' + """ calculate_ants_warp = pe.Node(interface=ants.Registration(), name='calculate_ants_warp') calculate_ants_warp.inputs.output_warped_image = True calculate_ants_warp.inputs.initial_moving_transform_com = 0 - ''' - reg_imports = ['import os', 'import subprocess'] - calculate_ants_warp = \ - pe.Node(interface=util.Function(input_names=['moving_brain', - 'reference_brain', - 'moving_skull', - 'reference_skull', - 'ants_para', - 'moving_mask', - 'reference_mask', - 'fixed_image_mask', - 'interp', - 'reg_with_skull'], - output_names=['warp_list', - 'warped_image'], - function=hardcoded_reg, - imports=reg_imports), - name='calc_ants_warp', - mem_gb=2.8, - mem_x=(2e-7, 'moving_brain', 'xyz'), - throttle=True) + """ + reg_imports = ["import os", "import subprocess"] + calculate_ants_warp = pe.Node( + interface=util.Function( + input_names=[ + "moving_brain", + "reference_brain", + "moving_skull", + "reference_skull", + "ants_para", + "moving_mask", + "reference_mask", + "fixed_image_mask", + "interp", + "reg_with_skull", + ], + output_names=["warp_list", "warped_image"], + function=hardcoded_reg, + imports=reg_imports, + ), + name="calc_ants_warp", + mem_gb=2.8, + mem_x=(2e-7, "moving_brain", "xyz"), + throttle=True, + ) calculate_ants_warp.interface.num_threads = num_threads - select_forward_initial = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_initial') + select_forward_initial = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_initial", + ) select_forward_initial.inputs.selection = "Initial" - select_forward_rigid = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_rigid') + select_forward_rigid = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_rigid", + ) select_forward_rigid.inputs.selection = "Rigid" - select_forward_affine = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_affine') + select_forward_affine = pe.Node( + 
util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_affine", + ) select_forward_affine.inputs.selection = "Affine" - select_forward_warp = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_warp') + select_forward_warp = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_warp", + ) select_forward_warp.inputs.selection = "Warp" - select_inverse_warp = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_inverse_warp') + select_inverse_warp = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_inverse_warp", + ) select_inverse_warp.inputs.selection = "Inverse" calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_brain') + inputspec, "moving_brain", calculate_ants_warp, "moving_brain" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_brain') + inputspec, "reference_brain", calculate_ants_warp, "reference_brain" + ) if reg_ants_skull == 1: - calculate_ants_warp.inputs.reg_with_skull = 1 calc_ants_warp_wf.connect( - inputspec, 'moving_skull', - calculate_ants_warp, 'moving_skull') + inputspec, "moving_skull", calculate_ants_warp, "moving_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_skull', - calculate_ants_warp, 'reference_skull') + inputspec, "reference_skull", calculate_ants_warp, "reference_skull" + ) else: calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_skull') + inputspec, "moving_brain", calculate_ants_warp, "moving_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_skull') + inputspec, "reference_brain", calculate_ants_warp, "reference_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'fixed_image_mask', - calculate_ants_warp, 'fixed_image_mask') + inputspec, "fixed_image_mask", calculate_ants_warp, "fixed_image_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'reference_mask', - calculate_ants_warp, 'reference_mask') + calc_ants_warp_wf.connect( + inputspec, "reference_mask", calculate_ants_warp, "reference_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'moving_mask', - calculate_ants_warp, 'moving_mask') + calc_ants_warp_wf.connect( + inputspec, "moving_mask", calculate_ants_warp, "moving_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'ants_para', - calculate_ants_warp, 'ants_para') + calc_ants_warp_wf.connect(inputspec, "ants_para", calculate_ants_warp, "ants_para") - calc_ants_warp_wf.connect( - inputspec, 'interp', - calculate_ants_warp, 'interp') + calc_ants_warp_wf.connect(inputspec, "interp", calculate_ants_warp, "interp") # inter-workflow connections calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_initial, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_initial, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_rigid, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_rigid, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_affine, 
'warp_list') + calculate_ants_warp, "warp_list", select_forward_affine, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_warp, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_warp, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_inverse_warp, 'warp_list') + calculate_ants_warp, "warp_list", select_inverse_warp, "warp_list" + ) # connections to outputspec calc_ants_warp_wf.connect( - select_forward_initial, 'selected_warp', - outputspec, 'ants_initial_xfm') + select_forward_initial, "selected_warp", outputspec, "ants_initial_xfm" + ) calc_ants_warp_wf.connect( - select_forward_rigid, 'selected_warp', - outputspec, 'ants_rigid_xfm') + select_forward_rigid, "selected_warp", outputspec, "ants_rigid_xfm" + ) calc_ants_warp_wf.connect( - select_forward_affine, 'selected_warp', - outputspec, 'ants_affine_xfm') + select_forward_affine, "selected_warp", outputspec, "ants_affine_xfm" + ) calc_ants_warp_wf.connect( - select_forward_warp, 'selected_warp', - outputspec, 'warp_field') + select_forward_warp, "selected_warp", outputspec, "warp_field" + ) calc_ants_warp_wf.connect( - select_inverse_warp, 'selected_warp', - outputspec, 'inverse_warp_field') + select_inverse_warp, "selected_warp", outputspec, "inverse_warp_field" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warped_image', - outputspec, 'normalized_output_brain') + calculate_ants_warp, "warped_image", outputspec, "normalized_output_brain" + ) return calc_ants_warp_wf -def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, - symmetric=False, template="T1w"): - +def FSL_registration_connector( + wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w" +): wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'input_head', - 'reference_head', - 'input_mask', - 'reference_mask', - 'transform', - 'interpolation', - 'fnirt_config']), - name='inputspec') - - sym = '' - symm = '' - if symmetric: - sym = 'sym' - symm = '_symmetric' + util.IdentityInterface( + fields=[ + "input_brain", + "reference_brain", + "input_head", + "reference_head", + "input_mask", + "reference_mask", + "transform", + "interpolation", + "fnirt_config", + ] + ), + name="inputspec", + ) - tmpl = '' - if template == 'EPI': - tmpl = 'EPI' + sym = "" + symm = "" + if symmetric: + sym = "sym" + symm = "_symmetric" - if opt == 'FSL' or opt == 'FSL-linear': + tmpl = "" + if template == "EPI": + tmpl = "EPI" + if opt == "FSL" or opt == "FSL-linear": flirt_reg_anat_mni = create_fsl_flirt_linear_reg( - f'anat_mni_flirt_register{symm}' + f"anat_mni_flirt_register{symm}" ) # Input registration parameters - wf.connect(inputNode, 'interpolation', - flirt_reg_anat_mni, 'inputspec.interp') + wf.connect(inputNode, "interpolation", flirt_reg_anat_mni, "inputspec.interp") - wf.connect(inputNode, 'input_brain', - flirt_reg_anat_mni, 'inputspec.input_brain') + wf.connect( + inputNode, "input_brain", flirt_reg_anat_mni, "inputspec.input_brain" + ) - wf.connect(inputNode, 'reference_brain', flirt_reg_anat_mni, - 'inputspec.reference_brain') + wf.connect( + inputNode, + "reference_brain", + flirt_reg_anat_mni, + "inputspec.reference_brain", + ) - write_lin_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name=f'fsl_lin-warp_to_nii{symm}') + write_lin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_lin-warp_to_nii{symm}" + ) - wf.connect(inputNode, 'reference_brain', - 
write_lin_composite_xfm, 'reference') + wf.connect(inputNode, "reference_brain", write_lin_composite_xfm, "reference") - wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', - write_lin_composite_xfm, 'premat') + wf.connect( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + write_lin_composite_xfm, + "premat", + ) - write_invlin_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name=f'fsl_invlin-warp_to_' - f'nii{symm}') + write_invlin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_" f"nii{symm}" + ) - wf.connect(inputNode, 'reference_brain', - write_invlin_composite_xfm, 'reference') + wf.connect( + inputNode, "reference_brain", write_invlin_composite_xfm, "reference" + ) - wf.connect(flirt_reg_anat_mni, 'outputspec.invlinear_xfm', - write_invlin_composite_xfm, 'premat') + wf.connect( + flirt_reg_anat_mni, + "outputspec.invlinear_xfm", + write_invlin_composite_xfm, + "premat", + ) outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - flirt_reg_anat_mni, 'outputspec.output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm': ( - write_lin_composite_xfm, 'out_file'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm': ( - write_invlin_composite_xfm, 'out_file'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - write_lin_composite_xfm, 'out_file') + f"space-{sym}template_desc-preproc_{orig}": ( + flirt_reg_anat_mni, + "outputspec.output_brain", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + write_lin_composite_xfm, + "out_file", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + write_invlin_composite_xfm, + "out_file", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + write_lin_composite_xfm, + "out_file", + ), } - - if opt == 'FSL': + if opt == "FSL": fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( - f'anat_mni_fnirt_register{symm}' + f"anat_mni_fnirt_register{symm}" ) - wf.connect(inputNode, 'input_brain', - fnirt_reg_anat_mni, 'inputspec.input_brain') + wf.connect( + inputNode, "input_brain", fnirt_reg_anat_mni, "inputspec.input_brain" + ) - wf.connect(inputNode, 'reference_brain', - fnirt_reg_anat_mni, 'inputspec.reference_brain') + wf.connect( + inputNode, + "reference_brain", + fnirt_reg_anat_mni, + "inputspec.reference_brain", + ) - wf.connect(inputNode, 'input_head', - fnirt_reg_anat_mni, 'inputspec.input_skull') + wf.connect(inputNode, "input_head", fnirt_reg_anat_mni, "inputspec.input_skull") # NOTE: crossover from above opt block - wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', - fnirt_reg_anat_mni, 'inputspec.linear_aff') + wf.connect( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + fnirt_reg_anat_mni, + "inputspec.linear_aff", + ) - wf.connect(inputNode, 'reference_head', - fnirt_reg_anat_mni, 'inputspec.reference_skull') + wf.connect( + inputNode, "reference_head", fnirt_reg_anat_mni, "inputspec.reference_skull" + ) - wf.connect(inputNode, 'reference_mask', - fnirt_reg_anat_mni, 'inputspec.ref_mask') + wf.connect( + inputNode, "reference_mask", fnirt_reg_anat_mni, "inputspec.ref_mask" + ) # assign the FSL FNIRT config file specified in pipeline config.yml - wf.connect(inputNode, 'fnirt_config', - fnirt_reg_anat_mni, 'inputspec.fnirt_config') + wf.connect( + inputNode, "fnirt_config", fnirt_reg_anat_mni, "inputspec.fnirt_config" + ) # NOTE: this is an UPDATE because of the opt block above added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 
'outputspec.output_brain'), - f'space-{sym}template_desc-head_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_head'), - f'space-{sym}template_desc-{orig}_mask': ( - fnirt_reg_anat_mni, 'outputspec.output_mask'), - f'space-{sym}template_desc-T1wT2w_biasfield': ( - fnirt_reg_anat_mni, 'outputspec.output_biasfield'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') + f"space-{sym}template_desc-preproc_{orig}": ( + fnirt_reg_anat_mni, + "outputspec.output_brain", + ), + f"space-{sym}template_desc-head_{orig}": ( + fnirt_reg_anat_mni, + "outputspec.output_head", + ), + f"space-{sym}template_desc-{orig}_mask": ( + fnirt_reg_anat_mni, + "outputspec.output_mask", + ), + f"space-{sym}template_desc-T1wT2w_biasfield": ( + fnirt_reg_anat_mni, + "outputspec.output_biasfield", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + fnirt_reg_anat_mni, + "outputspec.nonlinear_xfm", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": ( + fnirt_reg_anat_mni, + "outputspec.nonlinear_warp", + ), } outputs.update(added_outputs) return (wf, outputs) -def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", - symmetric=False, template="T1w"): - +def ANTs_registration_connector( + wf_name, cfg, params, orig="T1w", symmetric=False, template="T1w" +): wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'input_head', - 'reference_head', - 'input_mask', - 'reference_mask', - 'transform', - 'interpolation']), - name='inputspec') - - sym = '' - symm = '' + util.IdentityInterface( + fields=[ + "input_brain", + "reference_brain", + "input_head", + "reference_head", + "input_mask", + "reference_mask", + "transform", + "interpolation", + ] + ), + name="inputspec", + ) + + sym = "" + symm = "" if symmetric: - sym = 'sym' - symm = '_symmetric' + sym = "sym" + symm = "_symmetric" - tmpl = '' - if template == 'EPI': - tmpl = 'EPI' + tmpl = "" + if template == "EPI": + tmpl = "EPI" if params is None: - err_msg = '\n\n[!] C-PAC says: \nYou have selected ANTs as your ' \ - 'anatomical registration method.\n' \ - 'However, no ANTs parameters were specified.\n' \ - 'Please specify ANTs parameters properly and try again.' + err_msg = ( + "\n\n[!] C-PAC says: \nYou have selected ANTs as your " + "anatomical registration method.\n" + "However, no ANTs parameters were specified.\n" + "Please specify ANTs parameters properly and try again." + ) raise Exception(err_msg) - ants_reg_anat_mni = \ - create_wf_calculate_ants_warp( - f'anat_mni_ants_register{symm}', - num_threads=cfg.pipeline_setup['system_config'][ - 'num_ants_threads'], - reg_ants_skull=cfg['registration_workflows'][ - 'anatomical_registration']['reg_with_skull'] - ) + ants_reg_anat_mni = create_wf_calculate_ants_warp( + f"anat_mni_ants_register{symm}", + num_threads=cfg.pipeline_setup["system_config"]["num_ants_threads"], + reg_ants_skull=cfg["registration_workflows"]["anatomical_registration"][ + "reg_with_skull" + ], + ) ants_reg_anat_mni.inputs.inputspec.ants_para = params - wf.connect(inputNode, 'interpolation', - ants_reg_anat_mni, 'inputspec.interp') + wf.connect(inputNode, "interpolation", ants_reg_anat_mni, "inputspec.interp") # calculating the transform with the skullstripped is # reported to be better, but it requires very high # quality skullstripping. 
If skullstripping is imprecise # registration with skull is preferred - wf.connect(inputNode, 'input_brain', - ants_reg_anat_mni, 'inputspec.moving_brain') + wf.connect(inputNode, "input_brain", ants_reg_anat_mni, "inputspec.moving_brain") - wf.connect(inputNode, 'reference_brain', - ants_reg_anat_mni, 'inputspec.reference_brain') + wf.connect( + inputNode, "reference_brain", ants_reg_anat_mni, "inputspec.reference_brain" + ) - wf.connect(inputNode, 'input_head', - ants_reg_anat_mni, 'inputspec.moving_skull') + wf.connect(inputNode, "input_head", ants_reg_anat_mni, "inputspec.moving_skull") - wf.connect(inputNode, 'reference_head', - ants_reg_anat_mni, 'inputspec.reference_skull') + wf.connect( + inputNode, "reference_head", ants_reg_anat_mni, "inputspec.reference_skull" + ) - wf.connect(inputNode, 'input_mask', - ants_reg_anat_mni, 'inputspec.moving_mask') + wf.connect(inputNode, "input_mask", ants_reg_anat_mni, "inputspec.moving_mask") - wf.connect(inputNode, 'reference_mask', - ants_reg_anat_mni, 'inputspec.reference_mask') + wf.connect( + inputNode, "reference_mask", ants_reg_anat_mni, "inputspec.reference_mask" + ) ants_reg_anat_mni.inputs.inputspec.fixed_image_mask = None - if orig == 'T1w': - if cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['use_lesion_mask']: + if orig == "T1w": + if cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc( - wf_name=f'lesion_preproc{symm}' + lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}") + wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") + wf.connect( + lesion_preproc, + "outputspec.reorient", + ants_reg_anat_mni, + "inputspec.fixed_image_mask", ) - wf.connect(inputNode, 'lesion_mask', - lesion_preproc, 'inputspec.lesion') - wf.connect(lesion_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.fixed_image_mask') # combine the linear xfm's into one - makes it easier downstream write_composite_linear_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_linear{symm}_xfm', + name=f"write_composite_linear{symm}_xfm", mem_gb=1.155, - mem_x=(1708448960473801 / 1208925819614629174706176, 'input_image')) + mem_x=(1708448960473801 / 1208925819614629174706176, "input_image"), + ) write_composite_linear_xfm.inputs.print_out_composite_warp_file = True - write_composite_linear_xfm.inputs.output_image = \ + write_composite_linear_xfm.inputs.output_image = ( f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm.nii.gz" + ) - wf.connect(inputNode, 'input_brain', - write_composite_linear_xfm, 'input_image') + wf.connect(inputNode, "input_brain", write_composite_linear_xfm, "input_image") - wf.connect(inputNode, 'reference_brain', - write_composite_linear_xfm, 'reference_image') + wf.connect( + inputNode, "reference_brain", write_composite_linear_xfm, "reference_image" + ) - wf.connect(inputNode, 'interpolation', - write_composite_linear_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_linear_xfm, "interpolation") write_composite_linear_xfm.inputs.input_image_type = 0 write_composite_linear_xfm.inputs.dimension = 3 - collect_transforms = pe.Node(util.Merge(3), - name=f'collect_transforms{symm}', - mem_gb=0.8, - mem_x=(263474863123069 / - 37778931862957161709568, - 'in1')) + collect_transforms = pe.Node( + util.Merge(3), + name=f"collect_transforms{symm}", + 
mem_gb=0.8, + mem_x=(263474863123069 / 37778931862957161709568, "in1"), + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_transforms, "in3" + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_transforms', - mem_gb=6) + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_transforms", + mem_gb=6, + ) - wf.connect(collect_transforms, 'out', check_transform, 'transform_list') + wf.connect(collect_transforms, "out", check_transform, "transform_list") - wf.connect(check_transform, 'checked_transform_list', - write_composite_linear_xfm, 'transforms') + wf.connect( + check_transform, + "checked_transform_list", + write_composite_linear_xfm, + "transforms", + ) # combine the linear xfm's into one - makes it easier downstream write_composite_invlinear_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_invlinear{symm}_xfm', + name=f"write_composite_invlinear{symm}_xfm", mem_gb=1.05, - mem_x=(1367826948979337 / 151115727451828646838272, 'input_image')) + mem_x=(1367826948979337 / 151115727451828646838272, "input_image"), + ) write_composite_invlinear_xfm.inputs.print_out_composite_warp_file = True - write_composite_invlinear_xfm.inputs.output_image = \ + write_composite_invlinear_xfm.inputs.output_image = ( f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm.nii.gz" + ) - wf.connect(inputNode, 'reference_brain', - write_composite_invlinear_xfm, 'input_image') + wf.connect( + inputNode, "reference_brain", write_composite_invlinear_xfm, "input_image" + ) - wf.connect(inputNode, 'input_brain', - write_composite_invlinear_xfm, 'reference_image') + wf.connect( + inputNode, "input_brain", write_composite_invlinear_xfm, "reference_image" + ) - wf.connect(inputNode, 'interpolation', - write_composite_invlinear_xfm, 'interpolation') + wf.connect( + inputNode, "interpolation", write_composite_invlinear_xfm, "interpolation" + ) write_composite_invlinear_xfm.inputs.input_image_type = 0 write_composite_invlinear_xfm.inputs.dimension = 3 - collect_inv_transforms = pe.Node(util.Merge(3), - name='collect_inv_transforms' - f'{symm}') + collect_inv_transforms = pe.Node( + util.Merge(3), name="collect_inv_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_inv_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_inv_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_inv_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_inv_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_inv_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_inv_transforms, "in3" + ) # 
check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_inv_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_inv_transforms", + ) - wf.connect(collect_inv_transforms, 'out', - check_invlinear_transform, 'transform_list') + wf.connect( + collect_inv_transforms, "out", check_invlinear_transform, "transform_list" + ) - wf.connect(check_invlinear_transform, 'checked_transform_list', - write_composite_invlinear_xfm, 'transforms') + wf.connect( + check_invlinear_transform, + "checked_transform_list", + write_composite_invlinear_xfm, + "transforms", + ) # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['inverse_transform_flags'], - function=generate_inverse_transform_flags), - name=f'inverse_transform_flags') + util.Function( + input_names=["transform_list"], + output_names=["inverse_transform_flags"], + function=generate_inverse_transform_flags, + ), + name="inverse_transform_flags", + ) - wf.connect(check_invlinear_transform, 'checked_transform_list', - inverse_transform_flags, 'transform_list') + wf.connect( + check_invlinear_transform, + "checked_transform_list", + inverse_transform_flags, + "transform_list", + ) - wf.connect(inverse_transform_flags, 'inverse_transform_flags', - write_composite_invlinear_xfm, 'invert_transform_flags') + wf.connect( + inverse_transform_flags, + "inverse_transform_flags", + write_composite_invlinear_xfm, + "invert_transform_flags", + ) # combine ALL xfm's into one - makes it easier downstream write_composite_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_{symm}xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name=f"write_composite_{symm}xfm", mem_gb=1.5 + ) write_composite_xfm.inputs.print_out_composite_warp_file = True - write_composite_xfm.inputs.output_image = \ + write_composite_xfm.inputs.output_image = ( f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'input_brain', write_composite_xfm, 'input_image') + wf.connect(inputNode, "input_brain", write_composite_xfm, "input_image") - wf.connect(inputNode, 'reference_brain', - write_composite_xfm, 'reference_image') + wf.connect(inputNode, "reference_brain", write_composite_xfm, "reference_image") - wf.connect(inputNode, 'interpolation', - write_composite_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_xfm, "interpolation") write_composite_xfm.inputs.input_image_type = 0 write_composite_xfm.inputs.dimension = 3 - collect_all_transforms = pe.Node(util.Merge(4), - name=f'collect_all_transforms' - f'{symm}') + collect_all_transforms = pe.Node( + util.Merge(4), name=f"collect_all_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.warp_field', - collect_all_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.warp_field", collect_all_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_all_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_all_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_all_transforms, 'in3') + 
wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_all_transforms, "in3" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_all_transforms, 'in4') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_all_transforms, "in4" + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_all_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_all_transforms", + ) - wf.connect(collect_all_transforms, 'out', - check_all_transform, 'transform_list') + wf.connect(collect_all_transforms, "out", check_all_transform, "transform_list") - wf.connect(check_all_transform, 'checked_transform_list', - write_composite_xfm, 'transforms') + wf.connect( + check_all_transform, "checked_transform_list", write_composite_xfm, "transforms" + ) # combine ALL xfm's into one - makes it easier downstream write_composite_inv_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_inv_{symm}xfm', + name=f"write_composite_inv_{symm}xfm", mem_gb=0.3, - mem_x=(6278549929741219 / 604462909807314587353088, 'input_image')) + mem_x=(6278549929741219 / 604462909807314587353088, "input_image"), + ) write_composite_inv_xfm.inputs.print_out_composite_warp_file = True - write_composite_inv_xfm.inputs.output_image = \ + write_composite_inv_xfm.inputs.output_image = ( f"from-{sym}{tmpl}template_to-{orig}_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'reference_brain', - write_composite_inv_xfm, 'input_image') + wf.connect(inputNode, "reference_brain", write_composite_inv_xfm, "input_image") - wf.connect(inputNode, 'input_brain', - write_composite_inv_xfm, 'reference_image') + wf.connect(inputNode, "input_brain", write_composite_inv_xfm, "reference_image") - wf.connect(inputNode, 'interpolation', - write_composite_inv_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_inv_xfm, "interpolation") write_composite_inv_xfm.inputs.input_image_type = 0 write_composite_inv_xfm.inputs.dimension = 3 - collect_all_inv_transforms = pe.Node(util.Merge(4), - name=f'collect_all_inv_transforms' - f'{symm}') + collect_all_inv_transforms = pe.Node( + util.Merge(4), name=f"collect_all_inv_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_all_inv_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_initial_xfm", + collect_all_inv_transforms, + "in1", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_all_inv_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_rigid_xfm", + collect_all_inv_transforms, + "in2", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_all_inv_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_affine_xfm", + collect_all_inv_transforms, + "in3", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.inverse_warp_field', - collect_all_inv_transforms, 'in4') + wf.connect( + ants_reg_anat_mni, + "outputspec.inverse_warp_field", + collect_all_inv_transforms, + "in4", + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 
'list_length'], - function=check_transforms), - name=f'check_all_inv_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_all_inv_transforms", + ) - wf.connect(collect_all_inv_transforms, 'out', - check_all_inv_transform, 'transform_list') + wf.connect( + collect_all_inv_transforms, "out", check_all_inv_transform, "transform_list" + ) - wf.connect(check_all_inv_transform, 'checked_transform_list', - write_composite_inv_xfm, 'transforms') + wf.connect( + check_all_inv_transform, + "checked_transform_list", + write_composite_inv_xfm, + "transforms", + ) # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['inverse_transform_flags'], - function=generate_inverse_transform_flags), - name=f'inverse_all_transform_flags') + util.Function( + input_names=["transform_list"], + output_names=["inverse_transform_flags"], + function=generate_inverse_transform_flags, + ), + name="inverse_all_transform_flags", + ) - wf.connect(check_all_inv_transform, 'checked_transform_list', - inverse_all_transform_flags, 'transform_list') + wf.connect( + check_all_inv_transform, + "checked_transform_list", + inverse_all_transform_flags, + "transform_list", + ) - wf.connect(inverse_all_transform_flags, 'inverse_transform_flags', - write_composite_inv_xfm, 'invert_transform_flags') + wf.connect( + inverse_all_transform_flags, + "inverse_transform_flags", + write_composite_inv_xfm, + "invert_transform_flags", + ) outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - ants_reg_anat_mni, 'outputspec.normalized_output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - write_composite_xfm, 'output_image'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( - write_composite_inv_xfm, 'output_image'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm': ( - write_composite_linear_xfm, 'output_image'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm': ( - write_composite_invlinear_xfm, 'output_image'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-nonlinear_xfm': ( - ants_reg_anat_mni, 'outputspec.warp_field'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-nonlinear_xfm': ( - ants_reg_anat_mni, 'outputspec.inverse_warp_field') + f"space-{sym}template_desc-preproc_{orig}": ( + ants_reg_anat_mni, + "outputspec.normalized_output_brain", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + write_composite_xfm, + "output_image", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_xfm": ( + write_composite_inv_xfm, + "output_image", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + write_composite_linear_xfm, + "output_image", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + write_composite_invlinear_xfm, + "output_image", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-nonlinear_xfm": ( + ants_reg_anat_mni, + "outputspec.warp_field", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-nonlinear_xfm": ( + ants_reg_anat_mni, + "outputspec.inverse_warp_field", + ), } return (wf, outputs) -def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, - blip=False): - +def bold_to_T1template_xfm_connector( + wf_name, cfg, reg_tool, symmetric=False, blip=False +): wf = pe.Workflow(name=wf_name) inputNode = 
pe.Node( - util.IdentityInterface(fields=['input_brain', - 'mean_bold', - 'coreg_xfm', - 'T1w-brain-template_funcreg', - 'T1w_to_template_xfm', - 'template_to_T1w_xfm', - 'blip_warp']), - name='inputspec') - - sym = '' + util.IdentityInterface( + fields=[ + "input_brain", + "mean_bold", + "coreg_xfm", + "T1w-brain-template_funcreg", + "T1w_to_template_xfm", + "template_to_T1w_xfm", + "blip_warp", + ] + ), + name="inputspec", + ) + + sym = "" if symmetric: - sym = 'sym' + sym = "sym" - if reg_tool == 'ants': - fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk') + if reg_tool == "ants": + fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name="fsl_reg_2_itk") fsl_reg_2_itk.inputs.itk_transform = True fsl_reg_2_itk.inputs.fsl2ras = True # convert the .mat from linear Func->Anat to # ANTS format - wf.connect(inputNode, 'coreg_xfm', fsl_reg_2_itk, 'transform_file') + wf.connect(inputNode, "coreg_xfm", fsl_reg_2_itk, "transform_file") - wf.connect(inputNode, 'input_brain', fsl_reg_2_itk, 'reference_file') + wf.connect(inputNode, "input_brain", fsl_reg_2_itk, "reference_file") - wf.connect(inputNode, 'mean_bold', fsl_reg_2_itk, 'source_file') + wf.connect(inputNode, "mean_bold", fsl_reg_2_itk, "source_file") - itk_imports = ['import os'] - change_transform = pe.Node(util.Function( - input_names=['input_affine_file'], - output_names=['updated_affine_file'], - function=change_itk_transform_type, - imports=itk_imports), - name='change_transform_type') + itk_imports = ["import os"] + change_transform = pe.Node( + util.Function( + input_names=["input_affine_file"], + output_names=["updated_affine_file"], + function=change_itk_transform_type, + imports=itk_imports, + ), + name="change_transform_type", + ) - wf.connect(fsl_reg_2_itk, 'itk_transform', - change_transform, 'input_affine_file') + wf.connect( + fsl_reg_2_itk, "itk_transform", change_transform, "input_affine_file" + ) # combine ALL xfm's into one - makes it easier downstream write_composite_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name="write_composite_xfm", mem_gb=1.5 + ) write_composite_xfm.inputs.print_out_composite_warp_file = True - write_composite_xfm.inputs.output_image = \ + write_composite_xfm.inputs.output_image = ( f"from-bold_to-{sym}template_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'mean_bold', - write_composite_xfm, 'input_image') + wf.connect(inputNode, "mean_bold", write_composite_xfm, "input_image") - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_xfm, 'reference_image') + wf.connect( + inputNode, + "T1w-brain-template_funcreg", + write_composite_xfm, + "reference_image", + ) write_composite_xfm.inputs.input_image_type = 0 write_composite_xfm.inputs.dimension = 3 - write_composite_xfm.inputs.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] + write_composite_xfm.inputs.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] if not blip: - collect_all_transforms = pe.Node(util.Merge(2), - name='collect_all_transforms') + collect_all_transforms = pe.Node( + util.Merge(2), name="collect_all_transforms" + ) else: - collect_all_transforms = pe.Node(util.Merge(3), - name='collect_all_transforms') + collect_all_transforms = pe.Node( + util.Merge(3), name="collect_all_transforms" + ) - wf.connect(inputNode, 'blip_warp', - collect_all_transforms, 'in3') + wf.connect(inputNode, 
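A note on the `C3dAffineTool` step above: FLIRT writes its affines in FSL's scaled-voxel convention, so the functional-to-anatomical matrix has to be re-expressed in ITK/ANTs world coordinates before it can be chained with ANTs warps, and `-fsl2ras` needs the same source and reference images FLIRT used. A standalone sketch with placeholder file names (nipype validates that assigned paths exist, so this is illustrative rather than copy-paste runnable):

    from nipype.interfaces import c3

    fsl2itk = c3.C3dAffineTool()
    fsl2itk.inputs.transform_file = "func2anat_flirt.mat"  # FLIRT output
    fsl2itk.inputs.source_file = "mean_bold.nii.gz"        # FLIRT -in
    fsl2itk.inputs.reference_file = "T1w_brain.nii.gz"     # FLIRT -ref
    fsl2itk.inputs.fsl2ras = True        # reinterpret in RAS world coordinates
    fsl2itk.inputs.itk_transform = True  # emit an ITK-format text transform
    # roughly: c3d_affine_tool -ref T1w_brain.nii.gz -src mean_bold.nii.gz \
    #          func2anat_flirt.mat -fsl2ras -oitk affine.txt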
"blip_warp", collect_all_transforms, "in3") - wf.connect(inputNode, 'T1w_to_template_xfm', - collect_all_transforms, 'in1') + wf.connect(inputNode, "T1w_to_template_xfm", collect_all_transforms, "in1") - wf.connect(change_transform, 'updated_affine_file', - collect_all_transforms, 'in2') + wf.connect( + change_transform, "updated_affine_file", collect_all_transforms, "in2" + ) - wf.connect(collect_all_transforms, 'out', - write_composite_xfm, 'transforms') + wf.connect(collect_all_transforms, "out", write_composite_xfm, "transforms") write_composite_inv_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_inv_xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name="write_composite_inv_xfm", mem_gb=1.5 + ) write_composite_inv_xfm.inputs.print_out_composite_warp_file = True write_composite_inv_xfm.inputs.invert_transform_flags = [True, False] - write_composite_inv_xfm.inputs.output_image = \ + write_composite_inv_xfm.inputs.output_image = ( f"from-{sym}template_to-bold_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_inv_xfm, 'input_image') + wf.connect( + inputNode, + "T1w-brain-template_funcreg", + write_composite_inv_xfm, + "input_image", + ) - wf.connect(inputNode, 'mean_bold', - write_composite_inv_xfm, 'reference_image') + wf.connect(inputNode, "mean_bold", write_composite_inv_xfm, "reference_image") write_composite_inv_xfm.inputs.input_image_type = 0 write_composite_inv_xfm.inputs.dimension = 3 - write_composite_inv_xfm.inputs.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] + write_composite_inv_xfm.inputs.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - collect_inv_transforms = pe.Node(util.Merge(2), - name='collect_inv_transforms') + collect_inv_transforms = pe.Node(util.Merge(2), name="collect_inv_transforms") - wf.connect(change_transform, 'updated_affine_file', - collect_inv_transforms, 'in1') + wf.connect( + change_transform, "updated_affine_file", collect_inv_transforms, "in1" + ) - wf.connect(inputNode, 'template_to_T1w_xfm', - collect_inv_transforms, 'in2') + wf.connect(inputNode, "template_to_T1w_xfm", collect_inv_transforms, "in2") - wf.connect(collect_inv_transforms, 'out', - write_composite_inv_xfm, 'transforms') + wf.connect(collect_inv_transforms, "out", write_composite_inv_xfm, "transforms") outputs = { - f'from-bold_to-{sym}template_mode-image_xfm': - (write_composite_xfm, 'output_image'), - f'from-{sym}template_to-bold_mode-image_xfm': - (write_composite_inv_xfm, 'output_image') + f"from-bold_to-{sym}template_mode-image_xfm": ( + write_composite_xfm, + "output_image", + ), + f"from-{sym}template_to-bold_mode-image_xfm": ( + write_composite_inv_xfm, + "output_image", + ), } - elif reg_tool == 'fsl': - - write_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name='combine_fsl_warps') + elif reg_tool == "fsl": + write_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name="combine_fsl_warps" + ) - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_xfm, 'reference') + wf.connect( + inputNode, "T1w-brain-template_funcreg", write_composite_xfm, "reference" + ) if blip: - wf.connect(inputNode, 'coreg_xfm', - write_composite_xfm, 'postmat') - wf.connect(inputNode, 'blip_warp', - write_composite_xfm, 'warp1') - wf.connect(inputNode, 'T1w_to_template_xfm', - write_composite_xfm, 'warp2') + wf.connect(inputNode, "coreg_xfm", 
write_composite_xfm, "postmat") + wf.connect(inputNode, "blip_warp", write_composite_xfm, "warp1") + wf.connect(inputNode, "T1w_to_template_xfm", write_composite_xfm, "warp2") else: - wf.connect(inputNode, 'coreg_xfm', - write_composite_xfm, 'premat') - wf.connect(inputNode, 'T1w_to_template_xfm', - write_composite_xfm, 'warp1') + wf.connect(inputNode, "coreg_xfm", write_composite_xfm, "premat") + wf.connect(inputNode, "T1w_to_template_xfm", write_composite_xfm, "warp1") outputs = { - f'from-bold_to-{sym}template_mode-image_xfm': - (write_composite_xfm, 'out_file'), + f"from-bold_to-{sym}template_mode-image_xfm": ( + write_composite_xfm, + "out_file", + ), } return (wf, outputs) @@ -1976,10 +2239,8 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, "space-template_desc-head_T1w": {"Template": "T1w-template"}, "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "space-template_desc-T1wT2w_biasfield": {"Template": "T1w-template"}, - "from-T1w_to-template_mode-image_desc-linear_xfm": { - "Template": "T1w-template"}, - "from-template_to-T1w_mode-image_desc-linear_xfm": { - "Template": "T1w-template"}, + "from-T1w_to-template_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, + "from-template_to-T1w_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_xfm": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_warp": {"Template": "T1w-template"}, "from-longitudinal_to-template_mode-image_desc-linear_xfm": { @@ -1988,52 +2249,50 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, "from-template_to-longitudinal_mode-image_desc-linear_xfm": { "Template": "T1w-template" }, - "from-longitudinal_to-template_mode-image_xfm": { - "Template": "T1w-template"}, + "from-longitudinal_to-template_mode-image_xfm": {"Template": "T1w-template"}, }, ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - fsl, outputs = FSL_registration_connector(f'register_{opt}_anat_to_' - f'template_{pipe_num}', cfg, - orig='T1w', opt=opt) + fsl, outputs = FSL_registration_connector( + f"register_{opt}_anat_to_" f"template_{pipe_num}", cfg, orig="T1w", opt=opt + ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] fsl.inputs.inputspec.fnirt_config = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - connect, brain = \ - strat_pool.get_data(['desc-brain_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, fsl, 'inputspec.input_brain') + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data(["desc-preproc_T1w", - 
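`convertwarp` composes its inputs in a fixed order (premat, then warp1, then midmat, then warp2, then postmat), which is what the two branches above are arranging: without a blip warp, the rigid coregistration goes in as `premat` and the T1w-to-template field as `warp1`. A self-contained sketch of the non-blip wiring, with placeholder paths (again, nipype checks that inputs exist on assignment):

    from nipype.interfaces import fsl

    convert = fsl.ConvertWarp()
    convert.inputs.reference = "T1w_brain_template_funcreg.nii.gz"
    convert.inputs.premat = "bold2T1w_flirt.mat"       # linear coreg, applied first
    convert.inputs.warp1 = "T1w2template_warp.nii.gz"  # nonlinear field, applied second
    # convert.cmdline renders roughly:
    # convertwarp --ref=... --premat=bold2T1w_flirt.mat \
    #             --warp1=T1w2template_warp.nii.gz --out=...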
"space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('template-ref-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("template-ref-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2079,50 +2338,51 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - fsl, outputs = FSL_registration_connector(f'register_{opt}_anat_to_' - f'template_symmetric_' - f'{pipe_num}', cfg, orig='T1w', - opt=opt, symmetric=True) +def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + fsl, outputs = FSL_registration_connector( + f"register_{opt}_anat_to_" f"template_symmetric_" f"{pipe_num}", + cfg, + orig="T1w", + opt=opt, + symmetric=True, + ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] fsl.inputs.inputspec.fnirt_config = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - connect, brain = \ - strat_pool.get_data(['desc-brain_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, fsl, 'inputspec.input_brain') + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template-symmetric') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template-symmetric") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data(["desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('T1w-template-symmetric') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template-symmetric") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data('dilated-symmetric-brain-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("dilated-symmetric-brain-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' 
in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2131,8 +2391,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, @nodeblock( name="register_FSL_EPI_to_template", - config=["registration_workflows", "functional_registration", - "EPI_registration"], + config=["registration_workflows", "functional_registration", "EPI_registration"], switch=["run"], option_key="using", option_val=["FSL", "FSL-linear"], @@ -2149,42 +2408,43 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, "from-EPItemplate_to-bold_mode-image_desc-linear_xfm": { "Template": "EPI-template" }, - "from-bold_to-EPItemplate_mode-image_xfm": { - "Template": "EPI-template"}, + "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, }, ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Directly register the mean functional to an EPI template. No T1w + """Directly register the mean functional to an EPI template. No T1w involved. - ''' - - fsl, outputs = FSL_registration_connector(f'register_{opt}_EPI_to_' - f'template_{pipe_num}', cfg, - orig='bold', opt=opt, - template='EPI') + """ + fsl, outputs = FSL_registration_connector( + f"register_{opt}_EPI_to_" f"template_{pipe_num}", + cfg, + orig="bold", + opt=opt, + template="EPI", + ) - fsl.inputs.inputspec.interpolation = cfg['registration_workflows'][ - 'functional_registration']['EPI_registration']['FSL-FNIRT'][ - 'interpolation'] + fsl.inputs.inputspec.interpolation = cfg["registration_workflows"][ + "functional_registration" + ]["EPI_registration"]["FSL-FNIRT"]["interpolation"] - fsl.inputs.inputspec.fnirt_config = cfg['registration_workflows'][ - 'functional_registration']['EPI_registration']['FSL-FNIRT'][ - 'fnirt_config'] + fsl.inputs.inputspec.fnirt_config = cfg["registration_workflows"][ + "functional_registration" + ]["EPI_registration"]["FSL-FNIRT"]["fnirt_config"] - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, fsl, 'inputspec.input_brain') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data('EPI-template-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("EPI-template-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") return (wf, outputs) @@ -2220,130 +2480,141 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={ "space-template_desc-preproc_T1w": { "Description": "The preprocessed T1w brain transformed to " - "template space.", + 
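One caution on the `space-longitudinal` renaming loops that recur in these node blocks: they add and delete keys while iterating over `outputs.keys()`, which CPython tolerates only because the insert-then-delete keeps the dictionary's size constant; the language itself makes no such guarantee. A safer equivalent, shown purely as a sketch with dummy values:

    # Rename from-T1w/to-T1w keys over a snapshot of the keys, so the dict
    # can be mutated freely inside the loop.
    outputs = {
        "from-T1w_to-template_mode-image_xfm": ("node", "out"),
        "space-template_desc-preproc_T1w": ("node2", "out2"),
    }
    for key in list(outputs):
        for direction in ("from", "to"):
            if f"{direction}-T1w" in key:
                new_key = key.replace(f"{direction}-T1w", f"{direction}-longitudinal")
                outputs[new_key] = outputs.pop(key)
                break  # this key is gone; stop checking directions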
"template space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " - "to T1w-template space.", + "to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w-template space " - "to T1w native space.", + "to T1w native space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from T1w native " - "space to T1w-template space.", + "space to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "T1w-template space to T1w native space.", + "T1w-template space to T1w native space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w native space to T1w-template space.", + "T1w native space to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w-template space to T1w native space.", + "T1w-template space to T1w native space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w-template " - "space to longitudinal-template space.", + "space to longitudinal-template space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", + "T1w-template space to longitudinal-template " + "space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", + "T1w-template space to longitudinal-template " + "space.", "Template": "T1w-template", }, }, ) def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + params = cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["T1_registration"] - params = cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['T1_registration'] - - ants_rc, outputs = ANTs_registration_connector('ANTS_T1_to_template_' - f'{pipe_num}', cfg, - params, orig='T1w') + ants_rc, outputs = ANTs_registration_connector( + "ANTS_T1_to_template_" f"{pipe_num}", cfg, params, orig="T1w" + ) ants_rc.inputs.inputspec.interpolation = 
cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs']['interpolation'] + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - connect, brain = \ - strat_pool.get_data(['desc-preproc_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, ants_rc, 'inputspec.input_brain') - - t1w_brain_template = strat_pool.node_data('T1w-brain-template') - wf.connect(t1w_brain_template.node, t1w_brain_template.out, - ants_rc, 'inputspec.reference_brain') + wf.connect(node, out, ants_rc, "inputspec.input_brain") + + t1w_brain_template = strat_pool.node_data("T1w-brain-template") + wf.connect( + t1w_brain_template.node, + t1w_brain_template.out, + ants_rc, + "inputspec.reference_brain", + ) # TODO check the order of T1w - node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w", - "desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, ants_rc, 'inputspec.input_head') - + node, out = strat_pool.get_data( + [ + "desc-restore_T1w", + "desc-head_T1w", + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ] + ) + wf.connect(node, out, ants_rc, "inputspec.input_head") - t1w_template = strat_pool.node_data('T1w-template') - wf.connect(t1w_template.node, t1w_template.out, - ants_rc, 'inputspec.reference_head') + t1w_template = strat_pool.node_data("T1w-template") + wf.connect(t1w_template.node, t1w_template.out, ants_rc, "inputspec.reference_head") - brain_mask = strat_pool.node_data(["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask", - "space-T1w_desc-acpcbrain_mask"]) - wf.connect(brain_mask.node, brain_mask.out, - ants_rc, 'inputspec.input_mask') + brain_mask = strat_pool.node_data( + [ + "space-T1w_desc-brain_mask", + "space-longitudinal_desc-brain_mask", + "space-T1w_desc-acpcbrain_mask", + ] + ) + wf.connect(brain_mask.node, brain_mask.out, ants_rc, "inputspec.input_mask") - if strat_pool.check_rpool('T1w-brain-template-mask'): - node, out = strat_pool.get_data('T1w-brain-template-mask') - wf.connect(node, out, ants_rc, 'inputspec.reference_mask') + if strat_pool.check_rpool("T1w-brain-template-mask"): + node, out = strat_pool.get_data("T1w-brain-template-mask") + wf.connect(node, out, ants_rc, "inputspec.reference_mask") - if strat_pool.check_rpool('label-lesion_mask'): - node, out = strat_pool.get_data('label-lesion_mask') - wf.connect(node, out, ants_rc, 'inputspec.lesion_mask') + if strat_pool.check_rpool("label-lesion_mask"): + node, out = strat_pool.get_data("label-lesion_mask") + wf.connect(node, out, ants_rc, "inputspec.lesion_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs: - for direction in ['from', 'to']: - if f'{direction}-T1w' in key: - new_key = key.replace(f'{direction}-T1w', - f'{direction}-longitudinal') + for direction in ["from", "to"]: + if f"{direction}-T1w" in key: + new_key = key.replace( + f"{direction}-T1w", f"{direction}-longitudinal" + ) outputs[new_key] = outputs[key] del outputs[key] return (wf, outputs) @@ -2358,8 +2629,7 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[ ( ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], - ["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask"], + ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"], [ "desc-head_T1w", 
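Several of the `strat_pool.get_data()` calls above take a list of resource keys; C-PAC resolves such lists as an ordered fallback, returning the first resource actually present in the pool (which is why the `# TODO check the order of T1w` comment matters: the list order encodes a preference). A hypothetical sketch of that resolution, not the engine's real code:

    def get_first_available(pool, keys):
        """Return the (node, out) pair for the first key present in `pool`."""
        for key in keys if isinstance(keys, list) else [keys]:
            if key in pool:
                return pool[key]
        raise LookupError(f"none of {keys} found in resource pool")

    pool = {"desc-preproc_T1w": ("t1_node", "outputspec.t1")}
    node, out = get_first_available(
        pool, ["desc-restore_T1w", "desc-head_T1w", "desc-preproc_T1w"]
    )
    assert node == "t1_node"  # first key present wins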
"desc-preproc_T1w", @@ -2413,56 +2683,60 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - params = cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['T1_registration'] - - ants, outputs = ANTs_registration_connector('ANTS_T1_to_template_' - f'symmetric_{pipe_num}', cfg, - params, orig='T1w', - symmetric=True) +def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + params = cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["T1_registration"] + + ants, outputs = ANTs_registration_connector( + "ANTS_T1_to_template_" f"symmetric_{pipe_num}", + cfg, + params, + orig="T1w", + symmetric=True, + ) ants.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs']['interpolation'] + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - connect, brain = \ - strat_pool.get_data(['desc-preproc_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, ants, 'inputspec.input_brain') + wf.connect(node, out, ants, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template-symmetric') - wf.connect(node, out, ants, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template-symmetric") + wf.connect(node, out, ants, "inputspec.reference_brain") - node, out = strat_pool.get_data(["desc-head_T1w", "desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, ants, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-head_T1w", "desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, ants, "inputspec.input_head") - node, out = strat_pool.get_data('T1w-template-symmetric') - wf.connect(node, out, ants, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template-symmetric") + wf.connect(node, out, ants, "inputspec.reference_head") - node, out = strat_pool.get_data(["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask"]) - wf.connect(node, out, ants, 'inputspec.input_mask') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"] + ) + wf.connect(node, out, ants, "inputspec.input_mask") - node, out = strat_pool.get_data('dilated-symmetric-brain-mask') - wf.connect(node, out, ants, 'inputspec.reference_mask') + node, out = strat_pool.get_data("dilated-symmetric-brain-mask") + wf.connect(node, out, ants, "inputspec.reference_mask") - if strat_pool.check_rpool('label-lesion_mask'): - node, out = strat_pool.get_data('label-lesion_mask') - wf.connect(node, out, ants, 'inputspec.lesion_mask') + if strat_pool.check_rpool("label-lesion_mask"): + node, out = strat_pool.get_data("label-lesion_mask") + wf.connect(node, out, ants, "inputspec.lesion_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key 
= key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2494,45 +2768,48 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, "from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm": { "Template": "EPI-template" }, - "from-bold_to-EPItemplate_mode-image_xfm": { - "Template": "EPI-template"}, - "from-EPItemplate_to-bold_mode-image_xfm": { - "Template": "EPI-template"}, + "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, + "from-EPItemplate_to-bold_mode-image_xfm": {"Template": "EPI-template"}, }, ) def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Directly register the mean functional to an EPI template. No T1w + """Directly register the mean functional to an EPI template. No T1w involved. - ''' - params = cfg.registration_workflows['functional_registration'][ - 'EPI_registration']['ANTs']['parameters'] - - ants, outputs = ANTs_registration_connector('ANTS_bold_to_EPI-template' - f'_{pipe_num}', cfg, params, - orig='bold', template='EPI') + """ + params = cfg.registration_workflows["functional_registration"]["EPI_registration"][ + "ANTs" + ]["parameters"] + + ants, outputs = ANTs_registration_connector( + "ANTS_bold_to_EPI-template" f"_{pipe_num}", + cfg, + params, + orig="bold", + template="EPI", + ) ants.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['EPI_registration']['ANTs'][ - 'interpolation'] + "functional_registration" + ]["EPI_registration"]["ANTs"]["interpolation"] - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, ants, 'inputspec.input_brain') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, ants, "inputspec.input_brain") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, ants, 'inputspec.reference_brain') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, ants, "inputspec.reference_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, ants, 'inputspec.input_head') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, ants, "inputspec.input_head") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, ants, 'inputspec.reference_head') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, ants, "inputspec.reference_head") - node, out = strat_pool.get_data('space-bold_desc-brain_mask') - wf.connect(node, out, ants, 'inputspec.input_mask') + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, ants, "inputspec.input_mask") - if strat_pool.check_rpool('EPI-template-mask'): - node, out = strat_pool.get_data('EPI-template-mask') - wf.connect(node, out, ants, 'inputspec.reference_mask') + if strat_pool.check_rpool("EPI-template-mask"): + node, out = strat_pool.get_data("EPI-template-mask") + wf.connect(node, out, ants, "inputspec.reference_mask") return (wf, outputs) @@ -2559,8 +2836,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ( "desc-restore-brain_T1w", ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], - ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", - "T1w"], + ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], "space-T1w_desc-brain_mask", "T1w-template", @@ -2578,16 +2854,12 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "from-template_to-T1w_mode-image_xfm": {"Template": 
"T1w-template"}, }, ) -def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') +def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - if opt.lower() == 'fsl' and reg_tool.lower() == 'ants': - + if opt.lower() == "fsl" and reg_tool.lower() == "ants": # Apply head-to-head transforms on brain using ABCD-style registration # Convert ANTs warps to FSL warps to be consistent with the functional registration # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 @@ -2598,20 +2870,22 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, # -t ${WD}/xfms/T1w_to_MNI_1Rigid.mat \ # -t ${WD}/xfms/T1w_to_MNI_0DerivedInitialMovingTranslation.mat \ # -o [${WD}/xfms/ANTs_CombinedWarp.nii.gz,1] - ants_apply_warp_t1_to_template = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_T1_to_template_{pipe_num}') + ants_apply_warp_t1_to_template = pe.Node( + interface=ants.ApplyTransforms(), + name=f"ANTS-ABCD_T1_to_template_{pipe_num}", + ) ants_apply_warp_t1_to_template.inputs.dimension = 3 ants_apply_warp_t1_to_template.inputs.print_out_composite_warp_file = True - ants_apply_warp_t1_to_template.inputs.output_image = 'ANTs_CombinedWarp.nii.gz' + ants_apply_warp_t1_to_template.inputs.output_image = "ANTs_CombinedWarp.nii.gz" - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, ants_apply_warp_t1_to_template, 'input_image') + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, ants_apply_warp_t1_to_template, "input_image") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_t1_to_template, 'reference_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, ants_apply_warp_t1_to_template, "reference_image") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, ants_apply_warp_t1_to_template, 'transforms') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, ants_apply_warp_t1_to_template, "transforms") # antsApplyTransforms -d 3 -i ${T1wImage}.nii.gz -r ${Reference} \ # -t [${WD}/xfms/T1w_to_MNI_0DerivedInitialMovingTranslation.mat,1] \ @@ -2621,161 +2895,182 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, # -o [${WD}/xfms/ANTs_CombinedInvWarp.nii.gz,1] # T1wImage is ACPC aligned head - ants_apply_warp_template_to_t1 = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_template_to_T1_{pipe_num}') + ants_apply_warp_template_to_t1 = pe.Node( + interface=ants.ApplyTransforms(), + name=f"ANTS-ABCD_template_to_T1_{pipe_num}", + ) ants_apply_warp_template_to_t1.inputs.dimension = 3 ants_apply_warp_template_to_t1.inputs.print_out_composite_warp_file = True - ants_apply_warp_template_to_t1.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' + ants_apply_warp_template_to_t1.inputs.output_image = ( + "ANTs_CombinedInvWarp.nii.gz" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, ants_apply_warp_template_to_t1, 
"input_image") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'reference_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, ants_apply_warp_template_to_t1, "reference_image") - node, out = strat_pool.get_data('from-template_to-T1w_mode-image_xfm') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'transforms') + node, out = strat_pool.get_data("from-template_to-T1w_mode-image_xfm") + wf.connect(node, out, ants_apply_warp_template_to_t1, "transforms") # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple - split_combined_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_warp_{pipe_num}') - split_combined_warp.inputs.output_name = 'e' - - wf.connect(ants_apply_warp_t1_to_template, 'output_image', - split_combined_warp, 'input') + split_combined_warp = pe.Node( + util.Function( + input_names=["input", "output_name"], + output_names=["output1", "output2", "output3"], + function=run_c4d, + ), + name=f"split_combined_warp_{pipe_num}", + ) + split_combined_warp.inputs.output_name = "e" + + wf.connect( + ants_apply_warp_t1_to_template, "output_image", split_combined_warp, "input" + ) # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - split_combined_inv_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_inv_warp_{pipe_num}') - split_combined_inv_warp.inputs.output_name = 'einv' - - wf.connect(ants_apply_warp_template_to_t1, 'output_image', - split_combined_inv_warp, 'input') + split_combined_inv_warp = pe.Node( + util.Function( + input_names=["input", "output_name"], + output_names=["output1", "output2", "output3"], + function=run_c4d, + ), + name=f"split_combined_inv_warp_{pipe_num}", + ) + split_combined_inv_warp.inputs.output_name = "einv" + + wf.connect( + ants_apply_warp_template_to_t1, + "output_image", + split_combined_inv_warp, + "input", + ) # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz - change_e2_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2_sign_{pipe_num}') - change_e2_sign.inputs.args = '-mul -1' + change_e2_sign = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"change_e2_sign_{pipe_num}" + ) + change_e2_sign.inputs.args = "-mul -1" - wf.connect(split_combined_warp, 'output2', - change_e2_sign, 'in_file') + wf.connect(split_combined_warp, "output2", change_e2_sign, "in_file") # fslmaths ${WD}/xfms/e2inv.nii.gz -mul -1 ${WD}/xfms/e-2inv.nii.gz - change_e2inv_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2inv_sign_{pipe_num}') - change_e2inv_sign.inputs.args = '-mul -1' + change_e2inv_sign = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"change_e2inv_sign_{pipe_num}" + ) + change_e2inv_sign.inputs.args = "-mul -1" - wf.connect(split_combined_inv_warp, 'output2', - change_e2inv_sign, 'in_file') + wf.connect(split_combined_inv_warp, "output2", change_e2inv_sign, "in_file") # fslmerge -t ${OutputTransform} ${WD}/xfms/e1.nii.gz ${WD}/xfms/e-2.nii.gz ${WD}/xfms/e3.nii.gz - merge_xfms_to_list = pe.Node(util.Merge(3), - name=f'merge_t1_to_template_xfms_to_list_{pipe_num}') + merge_xfms_to_list = pe.Node( + 
util.Merge(3), name=f"merge_t1_to_template_xfms_to_list_{pipe_num}" + ) - wf.connect(split_combined_warp, 'output1', - merge_xfms_to_list, 'in1') - wf.connect(change_e2_sign, 'out_file', - merge_xfms_to_list, 'in2') - wf.connect(split_combined_warp, 'output3', - merge_xfms_to_list, 'in3') + wf.connect(split_combined_warp, "output1", merge_xfms_to_list, "in1") + wf.connect(change_e2_sign, "out_file", merge_xfms_to_list, "in2") + wf.connect(split_combined_warp, "output3", merge_xfms_to_list, "in3") - merge_xfms = pe.Node(interface=fslMerge(), - name=f'merge_t1_to_template_xfms_{pipe_num}') - merge_xfms.inputs.dimension = 't' + merge_xfms = pe.Node( + interface=fslMerge(), name=f"merge_t1_to_template_xfms_{pipe_num}" + ) + merge_xfms.inputs.dimension = "t" - wf.connect(merge_xfms_to_list, 'out', - merge_xfms, 'in_files') + wf.connect(merge_xfms_to_list, "out", merge_xfms, "in_files") # fslmerge -t ${OutputInvTransform} ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e-2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - merge_inv_xfms_to_list = pe.Node(util.Merge(3), - name=f'merge_template_to_t1_xfms_to_list_{pipe_num}') + merge_inv_xfms_to_list = pe.Node( + util.Merge(3), name=f"merge_template_to_t1_xfms_to_list_{pipe_num}" + ) - wf.connect(split_combined_inv_warp, 'output1', - merge_inv_xfms_to_list, 'in1') - wf.connect(change_e2inv_sign, 'out_file', - merge_inv_xfms_to_list, 'in2') - wf.connect(split_combined_inv_warp, 'output3', - merge_inv_xfms_to_list, 'in3') + wf.connect(split_combined_inv_warp, "output1", merge_inv_xfms_to_list, "in1") + wf.connect(change_e2inv_sign, "out_file", merge_inv_xfms_to_list, "in2") + wf.connect(split_combined_inv_warp, "output3", merge_inv_xfms_to_list, "in3") - merge_inv_xfms = pe.Node(interface=fslMerge(), - name=f'merge_template_to_t1_xfms_{pipe_num}') - merge_inv_xfms.inputs.dimension = 't' + merge_inv_xfms = pe.Node( + interface=fslMerge(), name=f"merge_template_to_t1_xfms_{pipe_num}" + ) + merge_inv_xfms.inputs.dimension = "t" - wf.connect(merge_inv_xfms_to_list, 'out', - merge_inv_xfms, 'in_files') + wf.connect(merge_inv_xfms_to_list, "out", merge_inv_xfms, "in_files") # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} - fsl_apply_warp_t1_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_to_template_{pipe_num}') + fsl_apply_warp_t1_to_template = pe.Node( + interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_to_template_{pipe_num}" + ) fsl_apply_warp_t1_to_template.inputs.relwarp = True - fsl_apply_warp_t1_to_template.inputs.interp = 'spline' + fsl_apply_warp_t1_to_template.inputs.interp = "spline" - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, fsl_apply_warp_t1_to_template, 'in_file') + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, fsl_apply_warp_t1_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_to_template, 'field_file') + wf.connect( + merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" + ) # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} - fsl_apply_warp_t1_brain_to_template = pe.Node(interface=fsl.ApplyWarp(), - 
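Taken together, the three steps above re-encode the ANTs composite field for FSL: `c4d -mcs` splits the 3-component displacement volume, `fslmaths -mul -1` flips the sign of the second (y) component to move between the ITK and FSL displacement conventions, and `fslmerge -t` stacks the components back into a single 4D field that `applywarp` accepts. As a rough illustration of what changes in the data (not a drop-in replacement for the c4d/FSL tool chain, and the input path is a placeholder):

    import nibabel as nib
    import numpy as np

    warp = nib.load("ANTs_CombinedWarp.nii.gz")
    data = np.asanyarray(warp.dataobj)            # e.g. (x, y, z, 1, 3)
    data = data.reshape(data.shape[:3] + (3,))    # drop the singleton axis
    data[..., 1] *= -1                            # the fslmaths -mul -1 step
    nib.save(nib.Nifti1Image(data, warp.affine, warp.header),
             "FSL_CombinedWarp.nii.gz")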
name=f'FSL-ABCD_T1_brain_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_to_template = pe.Node( + interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_brain_to_template_{pipe_num}" + ) fsl_apply_warp_t1_brain_to_template.inputs.relwarp = True - fsl_apply_warp_t1_brain_to_template.inputs.interp = 'nn' + fsl_apply_warp_t1_brain_to_template.inputs.interp = "nn" # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'in_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_to_template, 'field_file') + wf.connect( + merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" + ) - fsl_apply_warp_t1_brain_mask_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_brain_mask_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_mask_to_template = pe.Node( + interface=fsl.ApplyWarp(), + name=f"FSL-ABCD_T1_brain_mask_to_template_{pipe_num}", + ) fsl_apply_warp_t1_brain_mask_to_template.inputs.relwarp = True - fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = 'nn' + fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = "nn" - node, out = strat_pool.get_data('space-T1w_desc-brain_mask') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'in_file') + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_mask_to_template, 'field_file') + wf.connect( + merge_xfms, + "merged_file", + fsl_apply_warp_t1_brain_mask_to_template, + "field_file", + ) # fslmaths ${OutputT1wImageRestore} -mas ${OutputT1wImageRestoreBrain} ${OutputT1wImageRestoreBrain} - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_t1_brain_{pipe_num}') + apply_mask = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_t1_brain_{pipe_num}" + ) - wf.connect(fsl_apply_warp_t1_to_template, 'out_file', - apply_mask, 'in_file') + wf.connect(fsl_apply_warp_t1_to_template, "out_file", apply_mask, "in_file") - wf.connect(fsl_apply_warp_t1_brain_to_template, 'out_file', - apply_mask, 'mask_file') + wf.connect( + fsl_apply_warp_t1_brain_to_template, "out_file", apply_mask, "mask_file" + ) outputs = { - 'space-template_desc-preproc_T1w': (apply_mask, 'out_file'), - 'space-template_desc-head_T1w': (fsl_apply_warp_t1_to_template, 'out_file'), - 'space-template_desc-T1w_mask': (fsl_apply_warp_t1_brain_mask_to_template, 'out_file'), - 'from-T1w_to-template_mode-image_xfm': (merge_xfms, 'merged_file'), - 'from-template_to-T1w_mode-image_xfm': (merge_inv_xfms, 'merged_file') + "space-template_desc-preproc_T1w": (apply_mask, "out_file"), + "space-template_desc-head_T1w": (fsl_apply_warp_t1_to_template, "out_file"), + "space-template_desc-T1w_mask": ( + 
fsl_apply_warp_t1_brain_mask_to_template, + "out_file", + ), + "from-T1w_to-template_mode-image_xfm": (merge_xfms, "merged_file"), + "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"), } return (wf, outputs) @@ -2796,40 +3091,37 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): - - get_func_volume = pe.Node(interface=afni.Calc(), - name=f'get_func_volume_{pipe_num}') + get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( - expr='a', - single_idx=cfg.registration_workflows['functional_registration']['coregistration'][ - 'func_input_prep']['Selected Functional Volume']['func_reg_input_volume'], - outputtype='NIFTI_GZ' + expr="a", + single_idx=cfg.registration_workflows["functional_registration"][ + "coregistration" + ]["func_input_prep"]["Selected Functional Volume"]["func_reg_input_volume"], + outputtype="NIFTI_GZ", ) - if not cfg.registration_workflows['functional_registration'][ - 'coregistration']['func_input_prep']['reg_with_skull']: + if not cfg.registration_workflows["functional_registration"]["coregistration"][ + "func_input_prep" + ]["reg_with_skull"]: node, out = strat_pool.get_data("desc-brain_bold") else: # TODO check which file is functional_skull_leaf # TODO add a function to choose brain or skull? node, out = strat_pool.get_data(["desc-motion_bold", "bold"]) - wf.connect(node, out, get_func_volume, 'in_file_a') + wf.connect(node, out, get_func_volume, "in_file_a") - coreg_input = (get_func_volume, 'out_file') + coreg_input = (get_func_volume, "out_file") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @nodeblock( name="coregistration_prep_mean", - switch=[["functional_preproc", "run"], - ["functional_preproc", "coreg_prep", "run"]], + switch=[["functional_preproc", "run"], ["functional_preproc", "coreg_prep", "run"]], option_key=[ "registration_workflows", "functional_registration", @@ -2842,38 +3134,34 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): - coreg_input = strat_pool.get_data("desc-mean_bold") # TODO add mean skull - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['func_input_prep']['Mean Functional'][ - 'n4_correct_func']: + if cfg.registration_workflows["functional_registration"]["coregistration"][ + "func_input_prep" + ]["Mean Functional"]["n4_correct_func"]: n4_correct_func = pe.Node( - interface= - ants.N4BiasFieldCorrection(dimension=3, - copy_header=True, - bspline_fitting_distance=200), + interface=ants.N4BiasFieldCorrection( + dimension=3, copy_header=True, bspline_fitting_distance=200 + ), shrink_factor=2, - name=f'func_mean_n4_corrected_{pipe_num}') - n4_correct_func.inputs.args = '-r True' + name=f"func_mean_n4_corrected_{pipe_num}", + ) + n4_correct_func.inputs.args = "-r True" node, out = coreg_input - wf.connect(node, out, n4_correct_func, 'input_image') + wf.connect(node, out, n4_correct_func, "input_image") - coreg_input = (n4_correct_func, 'output_image') + coreg_input = (n4_correct_func, "output_image") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @nodeblock( name="coregistration_prep_fmriprep", - switch=[["functional_preproc", "run"], - ["functional_preproc", "coreg_prep", "run"]], + 
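The optional N4 step above corrects low-frequency intensity bias in the mean BOLD before it is used as the FLIRT coregistration source, since a bias field skews intensity-based cost functions. One detail worth double-checking in that node: `shrink_factor=2` is passed to `pe.Node` rather than to the interface. A bare-interface sketch with the same settings (placeholder input path; nipype checks that it exists on assignment):

    from nipype.interfaces import ants

    n4 = ants.N4BiasFieldCorrection(
        dimension=3,
        copy_header=True,
        bspline_fitting_distance=200,
        shrink_factor=2,          # set on the interface itself
    )
    n4.inputs.input_image = "mean_bold.nii.gz"
    n4.inputs.args = "-r True"    # extra rescale flag, as in the node above
    # inspect n4.cmdline before running to confirm the assembled command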
switch=[["functional_preproc", "run"], ["functional_preproc", "coreg_prep", "run"]], option_key=[ "registration_workflows", "functional_registration", @@ -2886,12 +3174,9 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): - coreg_input = strat_pool.get_data("desc-ref_bold") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @@ -2930,158 +3215,181 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): diff_complete = False - if strat_pool.check_rpool("despiked-fieldmap") and \ - strat_pool.check_rpool("fieldmap-mask"): + if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( + "fieldmap-mask" + ): diff_complete = True - if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: + if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: # monkey data - func_to_anat = create_register_func_to_anat_use_T2(cfg, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + func_to_anat = create_register_func_to_anat_use_T2( + cfg, f"func_to_anat_FLIRT_" f"{pipe_num}" + ) # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc - func_mc_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_motion_corrected_mean_{pipe_num}') + func_mc_mean = pe.Node( + interface=afni_utils.TStat(), name=f"func_motion_corrected_mean_{pipe_num}" + ) - func_mc_mean.inputs.options = '-mean' - func_mc_mean.inputs.outputtype = 'NIFTI_GZ' + func_mc_mean.inputs.options = "-mean" + func_mc_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-motion_bold") - wf.connect(node, out, func_mc_mean, 'in_file') + wf.connect(node, out, func_mc_mean, "in_file") - wf.connect(func_mc_mean, 'out_file', func_to_anat, 'inputspec.func') + wf.connect(func_mc_mean, "out_file", func_to_anat, "inputspec.func") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, func_to_anat, 'inputspec.T1_brain') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, func_to_anat, "inputspec.T1_brain") - node, out = strat_pool.get_data('desc-head_T2w') - wf.connect(node, out, func_to_anat, 'inputspec.T2_head') + node, out = strat_pool.get_data("desc-head_T2w") + wf.connect(node, out, func_to_anat, "inputspec.T2_head") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, func_to_anat, 'inputspec.T2_brain') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, func_to_anat, "inputspec.T2_brain") else: # if field map-based distortion correction is on, but BBR is off, # send in the distortion correction files here - func_to_anat = create_register_func_to_anat(cfg, diff_complete, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + func_to_anat = create_register_func_to_anat( + cfg, diff_complete, f"func_to_anat_FLIRT_" f"{pipe_num}" + ) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ - 'functional_registration']['coregistration']['dof'] + "functional_registration" + ]["coregistration"]["dof"] func_to_anat.inputs.inputspec.interp = cfg.registration_workflows[ - 'functional_registration']['coregistration']['interpolation'] + "functional_registration" + ]["coregistration"]["interpolation"] - node, out = 
strat_pool.get_data('sbref') - wf.connect(node, out, func_to_anat, 'inputspec.func') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, func_to_anat, "inputspec.func") - node, out = strat_pool.get_data(['desc-restore-brain_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, func_to_anat, 'inputspec.anat') + node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, func_to_anat, 'echospacing_input.echospacing') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, func_to_anat, "echospacing_input.echospacing") - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, func_to_anat, 'pedir_input.pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, func_to_anat, "pedir_input.pedir") node, out = strat_pool.get_data("despiked-fieldmap") - wf.connect(node, out, func_to_anat, 'inputspec.fieldmap') + wf.connect(node, out, func_to_anat, "inputspec.fieldmap") node, out = strat_pool.get_data("fieldmap-mask") - wf.connect(node, out, func_to_anat, 'inputspec.fieldmapmask') + wf.connect(node, out, func_to_anat, "inputspec.fieldmapmask") - if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: + if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: outputs = { - 'space-T1w_sbref': - (func_to_anat, 'outputspec.anat_func_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_warp': - (func_to_anat, 'outputspec.func_to_anat_linear_warp_nobbreg') + "space-T1w_sbref": (func_to_anat, "outputspec.anat_func_nobbreg"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + ), + "from-bold_to-T1w_mode-image_desc-linear_warp": ( + func_to_anat, + "outputspec.func_to_anat_linear_warp_nobbreg", + ), } else: outputs = { - 'space-T1w_sbref': - (func_to_anat, 'outputspec.anat_func_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg') + "space-T1w_sbref": (func_to_anat, "outputspec.anat_func_nobbreg"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + ), } - if True in cfg.registration_workflows['functional_registration'][ - 'coregistration']["boundary_based_registration"]["run"]: - - func_to_anat_bbreg = create_bbregister_func_to_anat(diff_complete, - f'func_to_anat_' - f'bbreg_' - f'{pipe_num}') - func_to_anat_bbreg.inputs.inputspec.bbr_schedule = \ - cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'bbr_schedule'] - - func_to_anat_bbreg.inputs.inputspec.bbr_wm_mask_args = \ - cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'bbr_wm_mask_args'] - - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') - - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'reference'] == 'whole-head': - node, out = strat_pool.get_data('desc-head_T1w') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - - elif cfg.registration_workflows['functional_registration'][ - 
'coregistration']['boundary_based_registration'][ - 'reference'] == 'brain': - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - - wf.connect(func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg', - func_to_anat_bbreg, 'inputspec.linear_reg_matrix') - - if strat_pool.check_rpool('space-bold_label-WM_mask'): + if ( + True + in cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["run"] + ): + func_to_anat_bbreg = create_bbregister_func_to_anat( + diff_complete, f"func_to_anat_" f"bbreg_" f"{pipe_num}" + ) + func_to_anat_bbreg.inputs.inputspec.bbr_schedule = cfg.registration_workflows[ + "functional_registration" + ]["coregistration"]["boundary_based_registration"]["bbr_schedule"] + + func_to_anat_bbreg.inputs.inputspec.bbr_wm_mask_args = ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_mask_args"] + ) + + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.func") + + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "whole-head" + ): + node, out = strat_pool.get_data("desc-head_T1w") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat") + + elif ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "brain" + ): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat") + + wf.connect( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + func_to_anat_bbreg, + "inputspec.linear_reg_matrix", + ) + + if strat_pool.check_rpool("space-bold_label-WM_mask"): node, out = strat_pool.get_data(["space-bold_label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat_wm_segmentation") else: - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'probability_map': - node, out = strat_pool.get_data(["label-WM_probseg", - "label-WM_mask"]) - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'partial_volume_map': - node, out = strat_pool.get_data(["label-WM_pveseg", - "label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_map"] + == "probability_map" + ): + node, out = strat_pool.get_data(["label-WM_probseg", "label-WM_mask"]) + elif ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_map"] + == "partial_volume_map" + ): + node, out = strat_pool.get_data(["label-WM_pveseg", "label-WM_mask"]) + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat_wm_segmentation") if diff_complete: - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, - func_to_anat_bbreg, 'echospacing_input.echospacing') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, func_to_anat_bbreg, "echospacing_input.echospacing") - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, func_to_anat_bbreg, 
'pedir_input.pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, func_to_anat_bbreg, "pedir_input.pedir") node, out = strat_pool.get_data("despiked-fieldmap") - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmap') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.fieldmap") node, out = strat_pool.get_data("fieldmap-mask") - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.fieldmapmask') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.fieldmapmask") outputs = { - 'space-T1w_sbref': - (func_to_anat_bbreg, 'outputspec.anat_func'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat_bbreg, 'outputspec.func_to_anat_linear_xfm') + "space-T1w_sbref": (func_to_anat_bbreg, "outputspec.anat_func"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat_bbreg, + "outputspec.func_to_anat_linear_xfm", + ), } return (wf, outputs) @@ -3121,51 +3429,48 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - '''Condense the BOLD-to-T1 coregistration transform and the T1-to-template + """Condense the BOLD-to-T1 coregistration transform and the T1-to-template transform into one transform matrix. - ''' - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + """ + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - xfm, outputs = bold_to_T1template_xfm_connector('create_func_to_T1w' - f'template_xfm_{pipe_num}', - cfg, reg_tool, - symmetric=False) + xfm, outputs = bold_to_T1template_xfm_connector( + "create_func_to_T1w" f"template_xfm_{pipe_num}", cfg, reg_tool, symmetric=False + ) - node, out = strat_pool.get_data( - 'from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, xfm, 'inputspec.coreg_xfm') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, xfm, "inputspec.coreg_xfm") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, xfm, 'inputspec.input_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, xfm, "inputspec.input_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, xfm, 'inputspec.mean_bold') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, xfm, "inputspec.mean_bold") - node, out = strat_pool.get_data('T1w-brain-template-funcreg') - wf.connect(node, out, xfm, 'inputspec.T1w-brain-template_funcreg') + node, out = strat_pool.get_data("T1w-brain-template-funcreg") + wf.connect(node, out, xfm, "inputspec.T1w-brain-template_funcreg") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.T1w_to_template_xfm') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.T1w_to_template_xfm") # FNIRT pipelines don't have an inverse nonlinear warp, make optional - if strat_pool.check_rpool('from-template_to-T1w_mode-image_xfm'): - node, out = strat_pool.get_data('from-template_to-T1w_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.template_to_T1w_xfm') - - if strat_pool.check_rpool('ants-blip-warp'): - if reg_tool == 'ants': - node, out = strat_pool.get_data('ants-blip-warp') - wf.connect(node, out, xfm, 'inputspec.blip_warp') - elif reg_tool == 'fsl': + if strat_pool.check_rpool("from-template_to-T1w_mode-image_xfm"): + node, out = 
strat_pool.get_data("from-template_to-T1w_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.template_to_T1w_xfm") + + if strat_pool.check_rpool("ants-blip-warp"): + if reg_tool == "ants": + node, out = strat_pool.get_data("ants-blip-warp") + wf.connect(node, out, xfm, "inputspec.blip_warp") + elif reg_tool == "fsl": # apply the ants blip warp separately pass - elif strat_pool.check_rpool('fsl-blip-warp'): - if reg_tool == 'fsl': - node, out = strat_pool.get_data('fsl-blip-warp') - wf.connect(node, out, xfm, 'inputspec.blip_warp') - elif reg_tool == 'ants': + elif strat_pool.check_rpool("fsl-blip-warp"): + if reg_tool == "fsl": + node, out = strat_pool.get_data("fsl-blip-warp") + wf.connect(node, out, xfm, "inputspec.blip_warp") + elif reg_tool == "ants": # apply the fsl blip warp separately pass @@ -3200,42 +3505,39 @@ def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, - opt=None): - '''Condense the BOLD-to-T1 coregistration transform and the T1-to- +def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=None): + """Condense the BOLD-to-T1 coregistration transform and the T1-to- symmetric-template transform into one transform matrix. - ''' - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-symtemplate_mode-image_xfm') + """ + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-symtemplate_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - xfm, outputs = bold_to_T1template_xfm_connector('create_func_to_T1wsymtem' - f'plate_xfm_{pipe_num}', - cfg, reg_tool, - symmetric=True) + xfm, outputs = bold_to_T1template_xfm_connector( + "create_func_to_T1wsymtem" f"plate_xfm_{pipe_num}", + cfg, + reg_tool, + symmetric=True, + ) - node, out = strat_pool.get_data( - 'from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, xfm, 'inputspec.coreg_xfm') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, xfm, "inputspec.coreg_xfm") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, xfm, 'inputspec.input_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, xfm, "inputspec.input_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, xfm, 'inputspec.mean_bold') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, xfm, "inputspec.mean_bold") - node, out = strat_pool.get_data('T1w-brain-template-symmetric-deriv') - wf.connect(node, out, xfm, 'inputspec.T1w-brain-template_funcreg') + node, out = strat_pool.get_data("T1w-brain-template-symmetric-deriv") + wf.connect(node, out, xfm, "inputspec.T1w-brain-template_funcreg") - node, out = strat_pool.get_data('from-T1w_to-symtemplate_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.T1w_to_template_xfm') + node, out = strat_pool.get_data("from-T1w_to-symtemplate_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.T1w_to_template_xfm") # FNIRT pipelines don't have an inverse nonlinear warp, make optional - if strat_pool.check_rpool('from-symtemplate_to-T1w_mode-image_xfm'): - node, out = \ - strat_pool.get_data('from-symtemplate_to-T1w_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.template_to_T1w_xfm') + if strat_pool.check_rpool("from-symtemplate_to-T1w_mode-image_xfm"): + node, out = strat_pool.get_data("from-symtemplate_to-T1w_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.template_to_T1w_xfm") return (wf, outputs) @@ 
-3273,109 +3575,107 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, ], outputs=["sbref", "desc-preproc_bold", "desc-stc_bold", "bold"], ) -def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, - opt=None): - - outputs = {'desc-preproc_bold': strat_pool.get_data("desc-preproc_bold")} +def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if not strat_pool.check_rpool("despiked-fieldmap"): return (wf, outputs) - invert_coreg_xfm = pe.Node(interface=fsl.ConvertXFM(), - name=f'invert_coreg_xfm_{pipe_num}') + invert_coreg_xfm = pe.Node( + interface=fsl.ConvertXFM(), name=f"invert_coreg_xfm_{pipe_num}" + ) invert_coreg_xfm.inputs.invert_xfm = True node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") - wf.connect(node, out, invert_coreg_xfm, 'in_file') + wf.connect(node, out, invert_coreg_xfm, "in_file") - warp_fmap = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_fmap_{pipe_num}') + warp_fmap = pe.Node(interface=fsl.ApplyWarp(), name=f"warp_fmap_{pipe_num}") - node, out = strat_pool.get_data('despiked-fieldmap') - wf.connect(node, out, warp_fmap, 'in_file') + node, out = strat_pool.get_data("despiked-fieldmap") + wf.connect(node, out, warp_fmap, "in_file") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_fmap, 'ref_file') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_fmap, "ref_file") - wf.connect(invert_coreg_xfm, 'out_file', warp_fmap, 'premat') + wf.connect(invert_coreg_xfm, "out_file", warp_fmap, "premat") - mask_fmap = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'mask_fmap_{pipe_num}') - mask_fmap.inputs.args = '-abs -bin' + mask_fmap = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"mask_fmap_{pipe_num}" + ) + mask_fmap.inputs.args = "-abs -bin" - wf.connect(warp_fmap, 'out_file', mask_fmap, 'in_file') + wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name=f'apply_phasediff_convert_pedir_{pipe_num}') - conv_pedir.inputs.convert = 'ijk_to_xyz' + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name=f"apply_phasediff_convert_pedir_{pipe_num}", + ) + conv_pedir.inputs.convert = "ijk_to_xyz" - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, conv_pedir, 'pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, conv_pedir, "pedir") - fugue_saveshift = pe.Node(interface=fsl.FUGUE(), - name=f'fugue_saveshift_{pipe_num}') + fugue_saveshift = pe.Node(interface=fsl.FUGUE(), name=f"fugue_saveshift_{pipe_num}") fugue_saveshift.inputs.save_shift = True - wf.connect(warp_fmap, 'out_file', fugue_saveshift, 'fmap_in_file') - wf.connect(mask_fmap, 'out_file', fugue_saveshift, 'mask_file') + wf.connect(warp_fmap, "out_file", fugue_saveshift, "fmap_in_file") + wf.connect(mask_fmap, "out_file", fugue_saveshift, "mask_file") # FSL calls effective echo spacing = dwell time (not accurate) - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, fugue_saveshift, 'dwell_time') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, fugue_saveshift, "dwell_time") - wf.connect(conv_pedir, 'pedir', fugue_saveshift, 
'unwarp_direction') + wf.connect(conv_pedir, "pedir", fugue_saveshift, "unwarp_direction") - shift_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'shift_warp_{pipe_num}') + shift_warp = pe.Node(interface=fsl.ConvertWarp(), name=f"shift_warp_{pipe_num}") shift_warp.inputs.out_relwarp = True - wf.connect(fugue_saveshift, 'shift_out_file', shift_warp, 'shift_in_file') + wf.connect(fugue_saveshift, "shift_out_file", shift_warp, "shift_in_file") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, shift_warp, 'reference') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, shift_warp, "reference") - wf.connect(conv_pedir, 'pedir', shift_warp, 'shift_direction') + wf.connect(conv_pedir, "pedir", shift_warp, "shift_direction") - warp_bold = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_bold_phasediff_{pipe_num}') + warp_bold = pe.Node( + interface=fsl.ApplyWarp(), name=f"warp_bold_phasediff_{pipe_num}" + ) warp_bold.inputs.relwarp = True - warp_bold.inputs.interp = 'spline' - - if opt == 'default': - node, out = strat_pool.get_data('desc-preproc_bold') - out_label = 'desc-preproc_bold' - elif opt == 'single_step_resampling_from_stc': - node, out = strat_pool.get_data('desc-stc_bold') - out_label = 'desc-stc_bold' - elif opt == 'abcd': - node, out = strat_pool.get_data('bold') - out_label = 'bold' - - wf.connect(node, out, warp_bold, 'in_file') - - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_bold, 'ref_file') - - wf.connect(shift_warp, 'out_file', warp_bold, 'field_file') - - warp_sbref = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_sbref_phasediff_{pipe_num}') + warp_bold.inputs.interp = "spline" + + if opt == "default": + node, out = strat_pool.get_data("desc-preproc_bold") + out_label = "desc-preproc_bold" + elif opt == "single_step_resampling_from_stc": + node, out = strat_pool.get_data("desc-stc_bold") + out_label = "desc-stc_bold" + elif opt == "abcd": + node, out = strat_pool.get_data("bold") + out_label = "bold" + + wf.connect(node, out, warp_bold, "in_file") + + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_bold, "ref_file") + + wf.connect(shift_warp, "out_file", warp_bold, "field_file") + + warp_sbref = pe.Node( + interface=fsl.ApplyWarp(), name=f"warp_sbref_phasediff_{pipe_num}" + ) warp_sbref.inputs.relwarp = True - warp_sbref.inputs.interp = 'spline' + warp_sbref.inputs.interp = "spline" - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_sbref, 'in_file') - wf.connect(node, out, warp_sbref, 'ref_file') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_sbref, "in_file") + wf.connect(node, out, warp_sbref, "ref_file") - wf.connect(shift_warp, 'out_file', warp_sbref, 'field_file') + wf.connect(shift_warp, "out_file", warp_sbref, "field_file") - outputs = { - out_label: (warp_bold, 'out_file'), - 'sbref': (warp_sbref, 'out_file') - } + outputs = {out_label: (warp_bold, "out_file"), "sbref": (warp_sbref, "out_file")} return (wf, outputs) @@ -3412,69 +3712,67 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, ], outputs=["desc-preproc_bold", "desc-stc_bold", "bold"], ) -def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, - opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') +def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = 
check_prov_for_regtool(xfm_prov) - outputs = {'desc-preproc_bold': strat_pool.get_data("desc-preproc_bold")} + outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if strat_pool.check_rpool("ants-blip-warp"): - if reg_tool == 'fsl': + if reg_tool == "fsl": blip_node, blip_out = strat_pool.get_data("ants-blip-warp") - reg_tool = 'ants' + reg_tool = "ants" else: return (wf, outputs) elif strat_pool.check_rpool("fsl-blip-warp"): - if reg_tool == 'ants': + if reg_tool == "ants": blip_node, blip_out = strat_pool.get_data("fsl-blip-warp") - reg_tool = 'fsl' + reg_tool = "fsl" else: return (wf, outputs) else: return (wf, outputs) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_blip_sep_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_blip_sep_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") - if opt == 'default': - node, out = strat_pool.get_data('desc-preproc_bold') - out_label = 'desc-preproc_bold' - elif opt == 'single_step_resampling_from_stc': - node, out = strat_pool.get_data('desc-stc_bold') - out_label = 'desc-stc_bold' - elif opt == 'abcd': - node, out = strat_pool.get_data('bold') - out_label = 'bold' + if opt == "default": + node, out = strat_pool.get_data("desc-preproc_bold") + out_label = "desc-preproc_bold" + elif opt == "single_step_resampling_from_stc": + node, out = strat_pool.get_data("desc-stc_bold") + out_label = "desc-stc_bold" + elif opt == "abcd": + node, out = strat_pool.get_data("bold") + out_label = "bold" - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("sbref") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - wf.connect(blip_node, blip_out, apply_xfm, 'inputspec.transform') + wf.connect(blip_node, blip_out, apply_xfm, "inputspec.transform") - outputs = { - out_label: (apply_xfm, 'outputspec.output_image') - } + outputs = {out_label: (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3494,42 +3792,41 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}}, ) def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + xfm_prov = 
strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_wholehead_T1w_to_T1template_{pipe_num}', - reg_tool, time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_wholehead_T1w_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-head_T1w") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-head_T1w': (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-head_T1w": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3548,22 +3845,23 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}}, ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_T1mask_to_T1template_{pipe_num}', - reg_tool, time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_T1mask_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" - ''' + """ if reg_tool == 'ants': apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ 'functional_registration']['func_registration_to_template'][ @@ -3572,20 +3870,18 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): 
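# A note on the hardcoded "NearestNeighbor" above: a brain mask is binary, and
# spline or linear interpolation during resampling would smear its edges into
# fractional values that would then need re-thresholding, so mask warps ignore
# the configured interpolation. A minimal standalone sketch of the same idea
# with Nipype's FSL interface; the file names are hypothetical, not from this
# patch:
from nipype.interfaces import fsl

warp_mask = fsl.ApplyWarp(interp="nn", relwarp=True)
warp_mask.inputs.in_file = "space-T1w_desc-brain_mask.nii.gz"  # binary input mask (hypothetical)
warp_mask.inputs.ref_file = "T1w-template.nii.gz"  # target grid (hypothetical)
warp_mask.inputs.field_file = "from-T1w_to-template_warp.nii.gz"  # nonlinear warp (hypothetical)
# warp_mask.run()  # "nn" keeps the warped mask strictly 0/1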
apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ 'functional_registration']['func_registration_to_template'][ 'FNIRT_pipelines']['interpolation'] - ''' + """ connect = strat_pool.get_data("space-T1w_desc-brain_mask") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-brain_mask': (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-brain_mask": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3605,46 +3901,46 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-funcreg", ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"} + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"} }, ) def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_T1template_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_T1template_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-brain-template-funcreg") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-bold_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") outputs = { - 'space-template_desc-preproc_bold': (apply_xfm, 'outputspec.output_image') + "space-template_desc-preproc_bold": (apply_xfm, "outputspec.output_image") } return (wf, outputs) @@ -3670,43 
+3966,46 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): } }, ) -def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, - opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') +def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_T1template_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_T1template_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-brain-template-deriv") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-bold_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") outputs = { - 'space-template_res-derivative_desc-preproc_bold': - (apply_xfm, 'outputspec.output_image') + "space-template_res-derivative_desc-preproc_bold": ( + apply_xfm, + "outputspec.output_image", + ) } return (wf, outputs) @@ -3723,8 +4022,12 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, option_key=["apply_transform", "using"], option_val="abcd", inputs=[ - ("desc-preproc_bold", "desc-reorient_bold", "motion-basefile", - "coordinate-transformation"), + ( + "desc-preproc_bold", + "desc-reorient_bold", + "motion-basefile", + "coordinate-transformation", + ), "from-T1w_to-template_mode-image_xfm", "from-bold_to-T1w_mode-image_desc-linear_xfm", "from-bold_to-template_mode-image_xfm", @@ -3738,281 +4041,348 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_desc-brain_mask", ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-scout_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-head_bold": { - "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-scout_bold": {"Template": 
"T1w-brain-template-funcreg"}, + "space-template_desc-head_bold": {"Template": "T1w-brain-template-funcreg"}, }, ) -def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None - ): +def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries using ABCD-style registration # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz - convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_func_to_anat_linear_warp_{pipe_num}') + convert_func_to_anat_linear_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" + ) convert_func_to_anat_linear_warp.inputs.out_relwarp = True convert_func_to_anat_linear_warp.inputs.relwarp = True - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'reference') - - if strat_pool.check_rpool('fsl-blip-warp'): - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'postmat') - - node, out = strat_pool.get_data('fsl-blip-warp') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'warp1') + + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, convert_func_to_anat_linear_warp, "reference") + + if strat_pool.check_rpool("fsl-blip-warp"): + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, convert_func_to_anat_linear_warp, "postmat") + + node, out = strat_pool.get_data("fsl-blip-warp") + wf.connect(node, out, convert_func_to_anat_linear_warp, "warp1") else: - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'premat') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L140 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} - convert_func_to_standard_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_func_to_standard_warp_{pipe_num}') + convert_func_to_standard_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" + ) convert_func_to_standard_warp.inputs.out_relwarp = True convert_func_to_standard_warp.inputs.relwarp = True - wf.connect(convert_func_to_anat_linear_warp, 'out_file', - convert_func_to_standard_warp, 'warp1') + wf.connect( + convert_func_to_anat_linear_warp, + "out_file", + convert_func_to_standard_warp, + "warp1", + ) - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, convert_func_to_standard_warp, 'warp2') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, convert_func_to_standard_warp, "warp2") + from CPAC.func_preproc.func_preproc import ( + anat_brain_mask_to_bold_res, + anat_brain_to_bold_res, + ) - from 
CPAC.func_preproc.func_preproc import anat_brain_to_bold_res, anat_brain_mask_to_bold_res anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-preproc_T1w") + wf.connect( + node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" + ) - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + node, out = strat_pool.get_data("T1w-template-funcreg") + wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg") - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_func_to_standard_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_func_to_standard_warp, + "reference", + ) # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 - extract_func_roi = pe.Node(interface=fsl.ExtractROI(), - name=f'extract_func_roi_{pipe_num}') + extract_func_roi = pe.Node( + interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" + ) extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data('desc-reorient_bold') - wf.connect(node, out, extract_func_roi, 'in_file') + node, out = strat_pool.get_data("desc-reorient_bold") + wf.connect(node, out, extract_func_roi, "in_file") # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp - multiply_func_roi_by_zero = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'multiply_func_roi_by_zero_{pipe_num}') + multiply_func_roi_by_zero = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"multiply_func_roi_by_zero_{pipe_num}" + ) - multiply_func_roi_by_zero.inputs.args = '-mul 0' + multiply_func_roi_by_zero.inputs.args = "-mul 0" - wf.connect(extract_func_roi, 'roi_file', - multiply_func_roi_by_zero, 'in_file') + wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t - split_func = pe.Node(interface=fsl.Split(), - name=f'split_func_{pipe_num}') + split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") - split_func.inputs.dimension = 't' + split_func.inputs.dimension = "t" - node, out = strat_pool.get_data('desc-reorient_bold') - wf.connect(node, out, split_func, 'in_file') + node, out = strat_pool.get_data("desc-reorient_bold") + wf.connect(node, out, split_func, "in_file") ### Loop starts! 
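# The per-volume loop below mirrors DCAN's OneStepResampling: each split BOLD
# volume gets the gradient-distortion warp (zero-filled here, since there is no
# GDC) and its own motion matrix composed into a single relative warp, which is
# then composed with the BOLD-to-standard warp, so the timeseries is
# interpolated only once. A minimal standalone sketch of that first composition
# step, assuming hypothetical file names for one volume:
from nipype.interfaces import fsl

per_vol_warp = fsl.ConvertWarp(relwarp=True, out_relwarp=True)
per_vol_warp.inputs.reference = "prevols/vol0000.nii.gz"  # one volume from fslsplit (hypothetical)
per_vol_warp.inputs.warp1 = "bold_gdc_warp.nii.gz"  # zeroed warp standing in for GDC (hypothetical)
per_vol_warp.inputs.postmat = "MotionMatrices/MAT_0000"  # that volume's motion affine (hypothetical)
# per_vol_warp.run()  # the output is composed again with the func-to-standard warp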
### # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz - convert_motion_distortion_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_motion_distortion_warp_{pipe_num}', - iterfield=['reference', 'postmat']) + convert_motion_distortion_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_motion_distortion_warp_{pipe_num}", + iterfield=["reference", "postmat"], + ) convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_motion_distortion_warp, 'warp1') + wf.connect( + multiply_func_roi_by_zero, "out_file", convert_motion_distortion_warp, "warp1" + ) - wf.connect(split_func, 'out_files', - convert_motion_distortion_warp, 'reference') + wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference") - node, out = strat_pool.get_data('coordinate-transformation') - wf.connect(node, out, convert_motion_distortion_warp, 'postmat') + node, out = strat_pool.get_data("coordinate-transformation") + wf.connect(node, out, convert_motion_distortion_warp, "postmat") # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz - convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_registration_warp_{pipe_num}', - iterfield=['warp1']) + convert_registration_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_registration_warp_{pipe_num}", + iterfield=["warp1"], + ) convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_registration_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_registration_warp, + "reference", + ) - wf.connect(convert_motion_distortion_warp, 'out_file', - convert_registration_warp, 'warp1') + wf.connect( + convert_motion_distortion_warp, "out_file", convert_registration_warp, "warp1" + ) - wf.connect(convert_func_to_standard_warp, 'out_file', - convert_registration_warp, 'warp2') + wf.connect( + convert_func_to_standard_warp, "out_file", convert_registration_warp, "warp2" + ) # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz - generate_vol_mask = pe.MapNode(interface=fsl.maths.MathsCommand(), - name=f'generate_mask_{pipe_num}', - iterfield=['in_file']) + generate_vol_mask = pe.MapNode( + interface=fsl.maths.MathsCommand(), + name=f"generate_mask_{pipe_num}", + iterfield=["in_file"], + ) - generate_vol_mask.inputs.args = '-mul 0 -add 1' + generate_vol_mask.inputs.args = "-mul 0 -add 1" - wf.connect(split_func, 'out_files', - generate_vol_mask, 'in_file') + wf.connect(split_func, "out_files", generate_vol_mask, "in_file") # applywarp --rel --interp=spline --in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz - applywarp_func_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - 
name=f'applywarp_func_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_to_standard.inputs.relwarp = True - applywarp_func_to_standard.inputs.interp = 'spline' + applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(split_func, 'out_files', - applywarp_func_to_standard, 'in_file') + wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_to_standard, 'field_file') + wf.connect( + convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - applywarp_func_to_standard, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_func_to_standard, + "ref_file", + ) # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz - applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_mask_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_mask_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_mask_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_mask_to_standard.inputs.relwarp = True - applywarp_func_mask_to_standard.inputs.interp = 'nn' + applywarp_func_mask_to_standard.inputs.interp = "nn" - wf.connect(generate_vol_mask, 'out_file', - applywarp_func_mask_to_standard, 'in_file') + wf.connect( + generate_vol_mask, "out_file", applywarp_func_mask_to_standard, "in_file" + ) - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_mask_to_standard, 'field_file') + wf.connect( + convert_registration_warp, + "out_file", + applywarp_func_mask_to_standard, + "field_file", + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - applywarp_func_mask_to_standard, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_func_mask_to_standard, + "ref_file", + ) ### Loop ends! 
### # fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol - merge_func_to_standard = pe.Node(interface=fslMerge(), - name=f'merge_func_to_standard_{pipe_num}') + merge_func_to_standard = pe.Node( + interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}" + ) - merge_func_to_standard.inputs.dimension = 't' + merge_func_to_standard.inputs.dimension = "t" - wf.connect(applywarp_func_to_standard, 'out_file', - merge_func_to_standard, 'in_files') + wf.connect( + applywarp_func_to_standard, "out_file", merge_func_to_standard, "in_files" + ) # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol - merge_func_mask_to_standard = pe.Node(interface=fslMerge(), - name='merge_func_mask_to_' - f'standard_{pipe_num}') + merge_func_mask_to_standard = pe.Node( + interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + ) - merge_func_mask_to_standard.inputs.dimension = 't' + merge_func_mask_to_standard.inputs.dimension = "t" - wf.connect(applywarp_func_mask_to_standard, 'out_file', - merge_func_mask_to_standard, 'in_files') + wf.connect( + applywarp_func_mask_to_standard, + "out_file", + merge_func_mask_to_standard, + "in_files", + ) # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask - find_min_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'find_min_mask_{pipe_num}') + find_min_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"find_min_mask_{pipe_num}" + ) - find_min_mask.inputs.args = '-Tmin' + find_min_mask.inputs.args = "-Tmin" - wf.connect(merge_func_mask_to_standard, 'merged_file', - find_min_mask, 'in_file') + wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") # Combine transformations: gradient non-linearity distortion + fMRI_dc to standard # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${GradientDistortionField} --warp2=${OutputTransform} --out=${WD}/Scout_gdc_MNI_warp.nii.gz - convert_dc_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_dc_warp_{pipe_num}') + convert_dc_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_dc_warp_{pipe_num}" + ) convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_dc_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_dc_warp, + "reference", + ) - wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_dc_warp, 'warp1') + wf.connect(multiply_func_roi_by_zero, "out_file", convert_dc_warp, "warp1") - wf.connect(convert_func_to_standard_warp, 'out_file', - convert_dc_warp, 'warp2') + wf.connect(convert_func_to_standard_warp, "out_file", convert_dc_warp, "warp2") # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} - applywarp_scout = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_scout_input_{pipe_num}') + applywarp_scout = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_scout_input_{pipe_num}" + ) applywarp_scout.inputs.relwarp = True - applywarp_scout.inputs.interp = 'spline' + applywarp_scout.inputs.interp = "spline" - node, out = strat_pool.get_data('motion-basefile') - wf.connect(node, out, applywarp_scout, 'in_file') + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, applywarp_scout, "in_file") - wf.connect(anat_brain_to_func_res, 
'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_scout, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_scout, + "ref_file", + ) - wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') + wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float - merge_func_mask = pe.Node(util.Merge(2), - name=f'merge_func_mask_{pipe_num}') + merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") + + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res( + wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num + ) - anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - - node, out = strat_pool.get_data('space-template_desc-brain_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + node, out = strat_pool.get_data("space-template_desc-brain_mask") + wf.connect( + node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + anat_brain_mask_to_func_res, + "inputspec.space-template_desc-preproc_T1w", + ) - wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', merge_func_mask, 'in1') + wf.connect( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + merge_func_mask, + "in1", + ) - wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') + wf.connect(find_min_mask, "out_file", merge_func_mask, "in2") - extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'extract_func_brain_{pipe_num}') + extract_func_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"extract_func_brain_{pipe_num}" + ) - extract_func_brain.inputs.op_string = '-mas %s -mas %s -thr 0 -ing 10000' - extract_func_brain.inputs.output_datatype = 'float' + extract_func_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" + extract_func_brain.inputs.output_datatype = "float" - wf.connect(merge_func_to_standard, 'merged_file', - extract_func_brain, 'in_file') + wf.connect(merge_func_to_standard, "merged_file", extract_func_brain, "in_file") - wf.connect(merge_func_mask, 'out', - extract_func_brain, 'operand_files') + wf.connect(merge_func_mask, "out", extract_func_brain, "operand_files") # fslmaths ${ScoutInput} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${ScoutOutput} -odt float - extract_scout_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'extract_scout_brain_{pipe_num}') + extract_scout_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"extract_scout_brain_{pipe_num}" + ) - extract_scout_brain.inputs.op_string = '-mas %s -mas %s -thr 0 -ing 10000' - extract_scout_brain.inputs.output_datatype = 'float' + extract_scout_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" + extract_scout_brain.inputs.output_datatype = "float" - wf.connect(applywarp_scout, 'out_file', - extract_scout_brain, 'in_file') + wf.connect(applywarp_scout, "out_file", 
extract_scout_brain, "in_file") - wf.connect(merge_func_mask, 'out', - extract_scout_brain, 'operand_files') + wf.connect(merge_func_mask, "out", extract_scout_brain, "operand_files") outputs = { - 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), - 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), - 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') + "space-template_desc-preproc_bold": (extract_func_brain, "out_file"), + "space-template_desc-scout_bold": (extract_scout_brain, "out_file"), + "space-template_desc-head_bold": (merge_func_to_standard, "merged_file"), } return (wf, outputs) @@ -4045,265 +4415,302 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None "space-template_desc-bold_mask": {"Template": "T1w-template"}, }, ) -def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, - opt=None): +def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=None): # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_resample = pe.Node(interface=fsl.FLIRT(), - name=f'anat_resample_func_res_{pipe_num}' - ) - anat_resample.inputs.apply_isoxfm = float(cfg.registration_workflows['functional_registration']['func_registration_to_template']['output_resolution']['func_preproc_outputs'].replace("mm", "")) - anat_resample.inputs.interp = 'spline' + anat_resample = pe.Node( + interface=fsl.FLIRT(), name=f"anat_resample_func_res_{pipe_num}" + ) + anat_resample.inputs.apply_isoxfm = float( + cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["output_resolution"]["func_preproc_outputs"].replace("mm", "") + ) + anat_resample.inputs.interp = "spline" - node, out = strat_pool.get_data('space-template_desc-head_T1w') - wf.connect(node, out, anat_resample, 'in_file') - wf.connect(node, out, anat_resample, 'reference') + node, out = strat_pool.get_data("space-template_desc-head_T1w") + wf.connect(node, out, anat_resample, "in_file") + wf.connect(node, out, anat_resample, "reference") # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - applywarp_anat_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'anat_func_res_{pipe_num}') + applywarp_anat_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"anat_func_res_{pipe_num}" + ) applywarp_anat_res.inputs.relwarp = True - applywarp_anat_res.inputs.interp = 'spline' - applywarp_anat_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_anat_res.inputs.interp = "spline" + applywarp_anat_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-head_T1w') - wf.connect(node, out, applywarp_anat_res, 'in_file') - wf.connect(anat_resample, 'out_file', applywarp_anat_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-head_T1w") + wf.connect(node, out, 
applywarp_anat_res, "in_file") + wf.connect(anat_resample, "out_file", applywarp_anat_res, "ref_file") # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 # Create brain masks in this space (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - applywarp_anat_mask_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'anat_mask_func_res_{pipe_num}') + applywarp_anat_mask_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"anat_mask_func_res_{pipe_num}" + ) applywarp_anat_mask_res.inputs.relwarp = True - applywarp_anat_mask_res.inputs.interp = 'nn' - applywarp_anat_mask_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_anat_mask_res.inputs.interp = "nn" + applywarp_anat_mask_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, applywarp_anat_mask_res, 'in_file') - wf.connect(applywarp_anat_res, 'out_file', applywarp_anat_mask_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-T1w_mask") + wf.connect(node, out, applywarp_anat_mask_res, "in_file") + wf.connect(applywarp_anat_res, "out_file", applywarp_anat_mask_res, "ref_file") # ${FSLDIR}/bin/fslmaths ${WD}/${T1wImageFile}.${FinalfMRIResolution} -mas ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - T1_brain_res = pe.Node(interface=fsl.MultiImageMaths(), - name=f't1_brain_func_res_{pipe_num}') + T1_brain_res = pe.Node( + interface=fsl.MultiImageMaths(), name=f"t1_brain_func_res_{pipe_num}" + ) T1_brain_res.inputs.op_string = "-mas %s " - wf.connect(applywarp_anat_res, 'out_file', T1_brain_res, 'in_file') - wf.connect(applywarp_anat_mask_res, 'out_file', T1_brain_res, 'operand_files') + wf.connect(applywarp_anat_res, "out_file", T1_brain_res, "in_file") + wf.connect(applywarp_anat_mask_res, "out_file", T1_brain_res, "operand_files") # Create versions of the biasfield (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${BiasField} -r ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${BiasFieldFile}.${FinalfMRIResolution} - applywarp_bias_field_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'biasfiled_func_res_{pipe_num}') + applywarp_bias_field_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"biasfiled_func_res_{pipe_num}" + ) applywarp_bias_field_res.inputs.relwarp = True - applywarp_bias_field_res.inputs.interp = 'spline' - applywarp_bias_field_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_bias_field_res.inputs.interp = "spline" + applywarp_bias_field_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-T1wT2w_biasfield') - wf.connect(node, out, applywarp_bias_field_res, 'in_file') - wf.connect(T1_brain_res, 'out_file', applywarp_bias_field_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-T1wT2w_biasfield") + 
+    wf.connect(T1_brain_res, "out_file", applywarp_bias_field_res, "ref_file")

     # ${FSLDIR}/bin/fslmaths ${WD}/${BiasFieldFile}.${FinalfMRIResolution} -thr 0.1 ${WD}/${BiasFieldFile}.${FinalfMRIResolution}
-    biasfield_thr = pe.Node(interface=fsl.MultiImageMaths(),
-                            name=f'biasfiedl_thr_{pipe_num}')
+    biasfield_thr = pe.Node(
+        interface=fsl.MultiImageMaths(), name=f"biasfield_thr_{pipe_num}"
+    )
     biasfield_thr.inputs.op_string = "-thr 0.1"

-    wf.connect(applywarp_bias_field_res, 'out_file', biasfield_thr, 'in_file')
+    wf.connect(applywarp_bias_field_res, "out_file", biasfield_thr, "in_file")

     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L144-L146
     # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform}
-    convert_func_to_standard_warp = pe.Node(interface=fsl.ConvertWarp(),
-                                            name=f'convert_func_to_standard_warp_{pipe_num}')
+    convert_func_to_standard_warp = pe.Node(
+        interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}"
+    )

     convert_func_to_standard_warp.inputs.out_relwarp = True
     convert_func_to_standard_warp.inputs.relwarp = True

-    node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_warp')
-    wf.connect(node, out, convert_func_to_standard_warp, 'warp1')
+    node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_warp")
+    wf.connect(node, out, convert_func_to_standard_warp, "warp1")

-    node, out = strat_pool.get_data('from-T1w_to-template_mode-image_warp')
-    wf.connect(node, out, convert_func_to_standard_warp, 'warp2')
+    node, out = strat_pool.get_data("from-T1w_to-template_mode-image_warp")
+    wf.connect(node, out, convert_func_to_standard_warp, "warp2")

-    wf.connect(applywarp_anat_res, 'out_file', convert_func_to_standard_warp, 'reference')
+    wf.connect(
+        applywarp_anat_res, "out_file", convert_func_to_standard_warp, "reference"
+    )

     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158
     # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3
-    extract_func_roi = pe.Node(interface=fsl.ExtractROI(),
-                               name=f'extract_func_roi_{pipe_num}')
+    extract_func_roi = pe.Node(
+        interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}"
+    )

     extract_func_roi.inputs.t_min = 0
     extract_func_roi.inputs.t_size = 3

-    node, out = strat_pool.get_data(['desc-reorient_bold', 'bold'])
-    wf.connect(node, out, extract_func_roi, 'in_file')
+    node, out = strat_pool.get_data(["desc-reorient_bold", "bold"])
+    wf.connect(node, out, extract_func_roi, "in_file")

     # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp
-    multiply_func_roi_by_zero = pe.Node(interface=fsl.maths.MathsCommand(),
-                                        name=f'multiply_func_roi_by_zero_{pipe_num}')
+    multiply_func_roi_by_zero = pe.Node(
+        interface=fsl.maths.MathsCommand(), name=f"multiply_func_roi_by_zero_{pipe_num}"
+    )

-    multiply_func_roi_by_zero.inputs.args = '-mul 0'
+    multiply_func_roi_by_zero.inputs.args = "-mul 0"

-    wf.connect(extract_func_roi, 'roi_file',
-               multiply_func_roi_by_zero, 'in_file')
+    wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file")

     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173
     # fslsplit ${InputfMRI} ${WD}/prevols/vol -t
-    split_func = 
pe.Node(interface=fsl.Split(), - name=f'split_func_{pipe_num}') + split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") - split_func.inputs.dimension = 't' + split_func.inputs.dimension = "t" - node, out = strat_pool.get_data(['desc-reorient_bold', 'bold']) - wf.connect(node, out, split_func, 'in_file') + node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + wf.connect(node, out, split_func, "in_file") ### Loop starts! ### # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz - convert_motion_distortion_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_motion_distortion_warp_{pipe_num}', - iterfield=['reference', 'postmat']) + convert_motion_distortion_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_motion_distortion_warp_{pipe_num}", + iterfield=["reference", "postmat"], + ) convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_motion_distortion_warp, 'warp1') + wf.connect( + multiply_func_roi_by_zero, "out_file", convert_motion_distortion_warp, "warp1" + ) - wf.connect(split_func, 'out_files', - convert_motion_distortion_warp, 'reference') + wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference") - node, out = strat_pool.get_data('coordinate-transformation') - wf.connect(node, out, convert_motion_distortion_warp, 'postmat') + node, out = strat_pool.get_data("coordinate-transformation") + wf.connect(node, out, convert_motion_distortion_warp, "postmat") # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz - convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_registration_warp_{pipe_num}', - iterfield=['warp1']) + convert_registration_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_registration_warp_{pipe_num}", + iterfield=["warp1"], + ) convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - wf.connect(applywarp_anat_res, 'out_file', convert_registration_warp, 'reference') + wf.connect(applywarp_anat_res, "out_file", convert_registration_warp, "reference") - wf.connect(convert_motion_distortion_warp, 'out_file', - convert_registration_warp, 'warp1') + wf.connect( + convert_motion_distortion_warp, "out_file", convert_registration_warp, "warp1" + ) - wf.connect(convert_func_to_standard_warp, 'out_file', - convert_registration_warp, 'warp2') + wf.connect( + convert_func_to_standard_warp, "out_file", convert_registration_warp, "warp2" + ) # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz - generate_vol_mask = pe.MapNode(interface=fsl.maths.MathsCommand(), - name=f'generate_mask_{pipe_num}', - iterfield=['in_file']) + generate_vol_mask = pe.MapNode( + interface=fsl.maths.MathsCommand(), + name=f"generate_mask_{pipe_num}", + iterfield=["in_file"], + ) - generate_vol_mask.inputs.args = '-mul 0 -add 1' + generate_vol_mask.inputs.args = "-mul 0 -add 1" - wf.connect(split_func, 'out_files', - generate_vol_mask, 'in_file') + wf.connect(split_func, "out_files", generate_vol_mask, 
"in_file") # applywarp --rel --interp=spline --in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz - applywarp_func_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_to_standard.inputs.relwarp = True - applywarp_func_to_standard.inputs.interp = 'spline' + applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(split_func, 'out_files', - applywarp_func_to_standard, 'in_file') + wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_to_standard, 'field_file') + wf.connect( + convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" + ) - wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_to_standard, 'ref_file') + wf.connect(applywarp_anat_res, "out_file", applywarp_func_to_standard, "ref_file") # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz - applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_mask_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_mask_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_mask_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_mask_to_standard.inputs.relwarp = True - applywarp_func_mask_to_standard.inputs.interp = 'nn' + applywarp_func_mask_to_standard.inputs.interp = "nn" - wf.connect(generate_vol_mask, 'out_file', - applywarp_func_mask_to_standard, 'in_file') + wf.connect( + generate_vol_mask, "out_file", applywarp_func_mask_to_standard, "in_file" + ) - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_mask_to_standard, 'field_file') + wf.connect( + convert_registration_warp, + "out_file", + applywarp_func_mask_to_standard, + "field_file", + ) - wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_mask_to_standard, 'ref_file') + wf.connect( + applywarp_anat_res, "out_file", applywarp_func_mask_to_standard, "ref_file" + ) ### Loop ends! 
     # fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol
-    merge_func_to_standard = pe.Node(interface=fslMerge(),
-                                     name=f'merge_func_to_standard_{pipe_num}')
+    merge_func_to_standard = pe.Node(
+        interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}"
+    )

-    merge_func_to_standard.inputs.dimension = 't'
+    merge_func_to_standard.inputs.dimension = "t"

-    wf.connect(applywarp_func_to_standard, 'out_file',
-               merge_func_to_standard, 'in_files')
+    wf.connect(
+        applywarp_func_to_standard, "out_file", merge_func_to_standard, "in_files"
+    )

     # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol
-    merge_func_mask_to_standard = pe.Node(interface=fslMerge(),
-                                          name='merge_func_mask_to_'
-                                               f'standard_{pipe_num}')
+    merge_func_mask_to_standard = pe.Node(
+        interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}"
+    )

-    merge_func_mask_to_standard.inputs.dimension = 't'
+    merge_func_mask_to_standard.inputs.dimension = "t"

-    wf.connect(applywarp_func_mask_to_standard, 'out_file',
-               merge_func_mask_to_standard, 'in_files')
+    wf.connect(
+        applywarp_func_mask_to_standard,
+        "out_file",
+        merge_func_mask_to_standard,
+        "in_files",
+    )

     # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask
-    find_min_mask = pe.Node(interface=fsl.maths.MathsCommand(),
-                            name=f'find_min_mask_{pipe_num}')
+    find_min_mask = pe.Node(
+        interface=fsl.maths.MathsCommand(), name=f"find_min_mask_{pipe_num}"
+    )

-    find_min_mask.inputs.args = '-Tmin'
+    find_min_mask.inputs.args = "-Tmin"

-    wf.connect(merge_func_mask_to_standard, 'merged_file',
-               find_min_mask, 'in_file')
+    wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file")

     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119
     # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float
-    merge_func_mask = pe.Node(util.Merge(3),
-                              name=f'merge_operand_files_{pipe_num}')
-
-    wf.connect(biasfield_thr, 'out_file', merge_func_mask, 'in1')
+    merge_func_mask = pe.Node(util.Merge(3), name=f"merge_operand_files_{pipe_num}")

-    wf.connect(applywarp_anat_mask_res, 'out_file', merge_func_mask, 'in2')
+    wf.connect(biasfield_thr, "out_file", merge_func_mask, "in1")

-    wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in3')
+    wf.connect(applywarp_anat_mask_res, "out_file", merge_func_mask, "in2")

+    wf.connect(find_min_mask, "out_file", merge_func_mask, "in3")

-    extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(),
-                                 name=f'extract_func_brain_{pipe_num}')
+    extract_func_brain = pe.Node(
+        interface=fsl.MultiImageMaths(), name=f"extract_func_brain_{pipe_num}"
+    )

-    extract_func_brain.inputs.op_string = '-div %s -mas %s -mas %s -ing 10000'
-    extract_func_brain.inputs.output_datatype = 'float'
+    extract_func_brain.inputs.op_string = "-div %s -mas %s -mas %s -ing 10000"
+    extract_func_brain.inputs.output_datatype = "float"

-    wf.connect(merge_func_to_standard, 'merged_file',
-               extract_func_brain, 'in_file')
+    wf.connect(merge_func_to_standard, "merged_file", extract_func_brain, "in_file")

-    wf.connect(merge_func_mask, 'out',
-               extract_func_brain, 'operand_files')
+    wf.connect(merge_func_mask, "out", extract_func_brain, "operand_files")

-    func_mask_final = pe.Node(interface=fsl.MultiImageMaths(),
-                              name=f'func_mask_final_{pipe_num}')
+    func_mask_final = pe.Node(
+        interface=fsl.MultiImageMaths(), name=f"func_mask_final_{pipe_num}"
+    )
     func_mask_final.inputs.op_string = "-mas %s "

-    
wf.connect(applywarp_anat_mask_res, 'out_file', func_mask_final, 'in_file') + wf.connect(applywarp_anat_mask_res, "out_file", func_mask_final, "in_file") - wf.connect(find_min_mask, 'out_file', func_mask_final, 'operand_files') + wf.connect(find_min_mask, "out_file", func_mask_final, "operand_files") outputs = { - 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), - 'space-template_desc-bold_mask': (func_mask_final, 'out_file') + "space-template_desc-preproc_bold": (extract_func_brain, "out_file"), + "space-template_desc-bold_mask": (func_mask_final, "out_file"), } return (wf, outputs) @@ -4338,14 +4745,10 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, ) ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-brain_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-head_bold": { - "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-brain_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-bold_mask": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-head_bold": {"Template": "T1w-brain-template-funcreg"}, "space-template_res-derivative_desc-preproc_bold": { "Template": "T1w-brain-template-deriv" }, @@ -4354,9 +4757,10 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, }, }, ) -def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, - pipe_num, opt=None): - ''' +def single_step_resample_timeseries_to_T1template( + wf, cfg, strat_pool, pipe_num, opt=None +): + """ Apply motion correction, coreg, anat-to-template transforms on slice-time corrected functional timeseries based on fMRIPrep pipeline @@ -4393,213 +4797,238 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, OF THE POSSIBILITY OF SUCH DAMAGE. 
Ref: https://github.com/nipreps/fmriprep/blob/84a6005b/fmriprep/workflows/bold/resampling.py#L159-L419 - ''' # noqa: 501 - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + """ # noqa: 501 + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - bbr2itk = pe.Node(util.Function(input_names=['reference_file', - 'source_file', - 'transform_file'], - output_names=['itk_transform'], - function=run_c3d), - name=f'convert_bbr2itk_{pipe_num}') - - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'reference'] == 'whole-head': - node, out = strat_pool.get_data('T1w') - wf.connect(node, out, bbr2itk, 'reference_file') + bbr2itk = pe.Node( + util.Function( + input_names=["reference_file", "source_file", "transform_file"], + output_names=["itk_transform"], + function=run_c3d, + ), + name=f"convert_bbr2itk_{pipe_num}", + ) - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'reference'] == 'brain': - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, bbr2itk, 'reference_file') + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "whole-head" + ): + node, out = strat_pool.get_data("T1w") + wf.connect(node, out, bbr2itk, "reference_file") + + elif ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "brain" + ): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, bbr2itk, "reference_file") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, bbr2itk, 'source_file') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, bbr2itk, "source_file") - node, out = strat_pool.get_data( - 'from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, bbr2itk, 'transform_file') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, bbr2itk, "transform_file") - split_func = pe.Node(interface=fsl.Split(), - name=f'split_func_{pipe_num}') + split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") - split_func.inputs.dimension = 't' + split_func.inputs.dimension = "t" - node, out = strat_pool.get_data('desc-stc_bold') - wf.connect(node, out, split_func, 'in_file') + node, out = strat_pool.get_data("desc-stc_bold") + wf.connect(node, out, split_func, "in_file") ### Loop starts! 
###
-    motionxfm2itk = pe.MapNode(util.Function(
-        input_names=['reference_file',
-                     'source_file',
-                     'transform_file'],
-        output_names=['itk_transform'],
-        function=run_c3d),
-        name=f'convert_motionxfm2itk_{pipe_num}',
-        iterfield=['transform_file'])
-
-    node, out = strat_pool.get_data('motion-basefile')
-    wf.connect(node, out, motionxfm2itk, 'reference_file')
-    wf.connect(node, out, motionxfm2itk, 'source_file')
-
-    node, out = strat_pool.get_data('coordinate-transformation')
+    motionxfm2itk = pe.MapNode(
+        util.Function(
+            input_names=["reference_file", "source_file", "transform_file"],
+            output_names=["itk_transform"],
+            function=run_c3d,
+        ),
+        name=f"convert_motionxfm2itk_{pipe_num}",
+        iterfield=["transform_file"],
+    )
+
+    node, out = strat_pool.get_data("motion-basefile")
+    wf.connect(node, out, motionxfm2itk, "reference_file")
+    wf.connect(node, out, motionxfm2itk, "source_file")
+
+    node, out = strat_pool.get_data("coordinate-transformation")
     motion_correct_tool = check_prov_for_motion_tool(
-        strat_pool.get_cpac_provenance('coordinate-transformation'))
-    if motion_correct_tool == 'mcflirt':
-        wf.connect(node, out, motionxfm2itk, 'transform_file')
-    elif motion_correct_tool == '3dvolreg':
-        convert_transform = pe.Node(util.Function(
-            input_names=['one_d_filename'],
-            output_names=['transform_directory'],
-            function=one_d_to_mat,
-            imports=['import os', 'import numpy as np']),
-            name=f'convert_transform_{pipe_num}')
-        wf.connect(node, out, convert_transform, 'one_d_filename')
-        wf.connect(convert_transform, 'transform_directory',
-                   motionxfm2itk, 'transform_file')
+        strat_pool.get_cpac_provenance("coordinate-transformation")
+    )
+    if motion_correct_tool == "mcflirt":
+        wf.connect(node, out, motionxfm2itk, "transform_file")
+    elif motion_correct_tool == "3dvolreg":
+        convert_transform = pe.Node(
+            util.Function(
+                input_names=["one_d_filename"],
+                output_names=["transform_directory"],
+                function=one_d_to_mat,
+                imports=["import os", "import numpy as np"],
+            ),
+            name=f"convert_transform_{pipe_num}",
+        )
+        wf.connect(node, out, convert_transform, "one_d_filename")
+        wf.connect(
+            convert_transform, "transform_directory", motionxfm2itk, "transform_file"
+        )

     merge_num = 4
     blip = False
-    if strat_pool.check_rpool('ants-blip-warp') and reg_tool == 'ants':
-        blip_node, blip_out = strat_pool.get_data('ants-blip-warp')
+    if strat_pool.check_rpool("ants-blip-warp") and reg_tool == "ants":
+        blip_node, blip_out = strat_pool.get_data("ants-blip-warp")
         merge_num = 5
         blip = True
-    elif strat_pool.check_rpool('fsl-blip-warp') and reg_tool == 'fsl':
-        blip_node, blip_out = strat_pool.get_data('fsl-blip-warp')
+    elif strat_pool.check_rpool("fsl-blip-warp") and reg_tool == "fsl":
+        blip_node, blip_out = strat_pool.get_data("fsl-blip-warp")
         merge_num = 5
         blip = True

-    collectxfm = pe.MapNode(util.Merge(merge_num),
-                            name=f'collectxfm_func_to_standard_{pipe_num}',
-                            iterfield=[f'in{merge_num}'])
+    collectxfm = pe.MapNode(
+        util.Merge(merge_num),
+        name=f"collectxfm_func_to_standard_{pipe_num}",
+        iterfield=[f"in{merge_num}"],
+    )

-    node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm')
-    wf.connect(node, out, collectxfm, 'in1')
+    node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
+    wf.connect(node, out, collectxfm, "in1")

-    wf.connect(bbr2itk, 'itk_transform',
-               collectxfm, 'in2')
+    wf.connect(bbr2itk, "itk_transform", collectxfm, "in2")

-    collectxfm.inputs.in3 = 'identity'
+    collectxfm.inputs.in3 = "identity"

     if blip:
-        wf.connect(blip_node, blip_out, collectxfm, 'in4')
+        wf.connect(blip_node, blip_out, collectxfm, "in4")

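
A detail worth keeping in mind when reading collectxfm: ants.ApplyTransforms composes its transform list as a stack, applying the last-listed transform first, so the per-volume motion transform in the final slot hits each volume first and the T1w-to-template transform in "in1" is applied last; "identity" simply holds the unused slot so the list length stays fixed. For illustration only, with hypothetical file names standing in for the resources collected above:

    # How the collected list maps onto ANTs' transform stack; ANTs applies
    # the last-listed transform first. File names are hypothetical.
    from nipype.interfaces import ants

    at = ants.ApplyTransforms(
        input_image="vol0000.nii.gz",
        reference_image="T1w_brain_template_funcreg.nii.gz",
        transforms=[
            "from-T1w_to-template_mode-image_xfm.nii.gz",  # applied last
            "coreg_bbr_itk.txt",                           # applied third
            "identity",                                    # unused slot placeholder
            "motion_vol0000_itk.txt",                      # applied first
        ],
        interpolation="LanczosWindowedSinc",
        float=True,
    )
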
-    wf.connect(motionxfm2itk, 'itk_transform',
-               collectxfm, f'in{merge_num}')
+    wf.connect(motionxfm2itk, "itk_transform", collectxfm, f"in{merge_num}")

     applyxfm_func_to_standard = pe.MapNode(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_func_to_standard_{pipe_num}',
-        iterfield=['input_image', 'transforms'])
+        name=f"applyxfm_func_to_standard_{pipe_num}",
+        iterfield=["input_image", "transforms"],
+    )

     applyxfm_func_to_standard.inputs.float = True
-    applyxfm_func_to_standard.inputs.interpolation = 'LanczosWindowedSinc'
+    applyxfm_func_to_standard.inputs.interpolation = "LanczosWindowedSinc"

     applyxfm_derivfunc_to_standard = pe.MapNode(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_derivfunc_to_standard_{pipe_num}',
-        iterfield=['input_image', 'transforms'])
+        name=f"applyxfm_derivfunc_to_standard_{pipe_num}",
+        iterfield=["input_image", "transforms"],
+    )

     applyxfm_derivfunc_to_standard.inputs.float = True
-    applyxfm_derivfunc_to_standard.inputs.interpolation = 'LanczosWindowedSinc'
+    applyxfm_derivfunc_to_standard.inputs.interpolation = "LanczosWindowedSinc"
+
+    wf.connect(split_func, "out_files", applyxfm_func_to_standard, "input_image")
+    wf.connect(split_func, "out_files", applyxfm_derivfunc_to_standard, "input_image")

-    wf.connect(split_func, 'out_files',
-               applyxfm_func_to_standard, 'input_image')
-    wf.connect(split_func, 'out_files',
-               applyxfm_derivfunc_to_standard, 'input_image')
+    node, out = strat_pool.get_data("T1w-brain-template-funcreg")
+    wf.connect(node, out, applyxfm_func_to_standard, "reference_image")

-    node, out = strat_pool.get_data('T1w-brain-template-funcreg')
-    wf.connect(node, out, applyxfm_func_to_standard, 'reference_image')
-    
-    node, out = strat_pool.get_data('T1w-brain-template-deriv')
-    wf.connect(node, out, applyxfm_derivfunc_to_standard, 'reference_image')
+    node, out = strat_pool.get_data("T1w-brain-template-deriv")
+    wf.connect(node, out, applyxfm_derivfunc_to_standard, "reference_image")

-    wf.connect(collectxfm, 'out', applyxfm_func_to_standard, 'transforms')
-    wf.connect(collectxfm, 'out', applyxfm_derivfunc_to_standard, 'transforms')
+    wf.connect(collectxfm, "out", applyxfm_func_to_standard, "transforms")
+    wf.connect(collectxfm, "out", applyxfm_derivfunc_to_standard, "transforms")

     ### Loop ends! 
### - merge_func_to_standard = pe.Node(interface=fslMerge(), - name=f'merge_func_to_standard_{pipe_num}') - merge_func_to_standard.inputs.dimension = 't' + merge_func_to_standard = pe.Node( + interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}" + ) + merge_func_to_standard.inputs.dimension = "t" - wf.connect(applyxfm_func_to_standard, 'output_image', - merge_func_to_standard, 'in_files') + wf.connect( + applyxfm_func_to_standard, "output_image", merge_func_to_standard, "in_files" + ) merge_derivfunc_to_standard = pe.Node( - interface=fslMerge(), name=f'merge_derivfunc_to_standard_{pipe_num}') - merge_derivfunc_to_standard.inputs.dimension = 't' + interface=fslMerge(), name=f"merge_derivfunc_to_standard_{pipe_num}" + ) + merge_derivfunc_to_standard.inputs.dimension = "t" - wf.connect(applyxfm_derivfunc_to_standard, 'output_image', - merge_derivfunc_to_standard, 'in_files') + wf.connect( + applyxfm_derivfunc_to_standard, + "output_image", + merge_derivfunc_to_standard, + "in_files", + ) applyxfm_func_mask_to_standard = pe.Node( interface=ants.ApplyTransforms(), - name=f'applyxfm_func_mask_to_standard_{pipe_num}') - applyxfm_func_mask_to_standard.inputs.interpolation = 'MultiLabel' + name=f"applyxfm_func_mask_to_standard_{pipe_num}", + ) + applyxfm_func_mask_to_standard.inputs.interpolation = "MultiLabel" - node, out = strat_pool.get_data('space-bold_desc-brain_mask') - wf.connect(node, out, applyxfm_func_mask_to_standard, 'input_image') + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, applyxfm_func_mask_to_standard, "input_image") - node, out = strat_pool.get_data('T1w-brain-template-funcreg') - wf.connect(node, out, applyxfm_func_mask_to_standard, 'reference_image') + node, out = strat_pool.get_data("T1w-brain-template-funcreg") + wf.connect(node, out, applyxfm_func_mask_to_standard, "reference_image") collectxfm_mask = pe.Node( - util.Merge(2), name=f'collectxfm_func_mask_to_standard_{pipe_num}') + util.Merge(2), name=f"collectxfm_func_mask_to_standard_{pipe_num}" + ) - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, collectxfm_mask, 'in1') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, collectxfm_mask, "in1") - wf.connect(bbr2itk, 'itk_transform', collectxfm_mask, 'in2') + wf.connect(bbr2itk, "itk_transform", collectxfm_mask, "in2") - wf.connect(collectxfm_mask, 'out', - applyxfm_func_mask_to_standard, 'transforms') + wf.connect(collectxfm_mask, "out", applyxfm_func_mask_to_standard, "transforms") applyxfm_deriv_mask_to_standard = pe.Node( interface=ants.ApplyTransforms(), - name=f'applyxfm_deriv_mask_to_standard_{pipe_num}') - applyxfm_deriv_mask_to_standard.inputs.interpolation = 'MultiLabel' + name=f"applyxfm_deriv_mask_to_standard_{pipe_num}", + ) + applyxfm_deriv_mask_to_standard.inputs.interpolation = "MultiLabel" - node, out = strat_pool.get_data('space-bold_desc-brain_mask') - wf.connect(node, out, applyxfm_deriv_mask_to_standard, 'input_image') + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, applyxfm_deriv_mask_to_standard, "input_image") - node, out = strat_pool.get_data('T1w-brain-template-deriv') - wf.connect(node, out, applyxfm_deriv_mask_to_standard, 'reference_image') + node, out = strat_pool.get_data("T1w-brain-template-deriv") + wf.connect(node, out, applyxfm_deriv_mask_to_standard, "reference_image") collectxfm_deriv_mask = pe.Node( - util.Merge(2), 
name=f'collectxfm_deriv_mask_to_standard_{pipe_num}') + util.Merge(2), name=f"collectxfm_deriv_mask_to_standard_{pipe_num}" + ) - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, collectxfm_deriv_mask, 'in1') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, collectxfm_deriv_mask, "in1") - wf.connect(bbr2itk, 'itk_transform', - collectxfm_deriv_mask, 'in2') + wf.connect(bbr2itk, "itk_transform", collectxfm_deriv_mask, "in2") - wf.connect(collectxfm_deriv_mask, 'out', - applyxfm_deriv_mask_to_standard, 'transforms') + wf.connect( + collectxfm_deriv_mask, "out", applyxfm_deriv_mask_to_standard, "transforms" + ) - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_func_brain_to_standard_{pipe_num}') + apply_mask = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_func_brain_to_standard_{pipe_num}" + ) - wf.connect(merge_func_to_standard, 'merged_file', - apply_mask, 'in_file') + wf.connect(merge_func_to_standard, "merged_file", apply_mask, "in_file") - wf.connect(applyxfm_func_mask_to_standard, 'output_image', - apply_mask, 'mask_file') + wf.connect(applyxfm_func_mask_to_standard, "output_image", apply_mask, "mask_file") outputs = { - 'space-template_desc-head_bold': (merge_func_to_standard, - 'merged_file'), - 'space-template_desc-brain_bold': (apply_mask, 'out_file'), - 'space-template_desc-preproc_bold': (apply_mask, 'out_file'), - 'space-template_desc-bold_mask': (applyxfm_func_mask_to_standard, - 'output_image'), - 'space-template_res-derivative_desc-preproc_bold': - (merge_derivfunc_to_standard, 'merged_file'), - 'space-template_res-derivative_desc-bold_mask': - (applyxfm_deriv_mask_to_standard, 'output_image') + "space-template_desc-head_bold": (merge_func_to_standard, "merged_file"), + "space-template_desc-brain_bold": (apply_mask, "out_file"), + "space-template_desc-preproc_bold": (apply_mask, "out_file"), + "space-template_desc-bold_mask": ( + applyxfm_func_mask_to_standard, + "output_image", + ), + "space-template_res-derivative_desc-preproc_bold": ( + merge_derivfunc_to_standard, + "merged_file", + ), + "space-template_res-derivative_desc-bold_mask": ( + applyxfm_deriv_mask_to_standard, + "output_image", + ), } return (wf, outputs) @@ -4620,18 +5049,24 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, outputs={ "space-template_sbref": { "Description": "Single-volume sbref of the BOLD time-series " - "transformed to template space.", + "transformed to template space.", "Template": "T1w-brain-template-funcreg", } }, ) def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-template_mode-image_xfm' + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'sbref', xfm, - reference='T1w-brain-template-funcreg', time_series=False)[:2] - outputs = {'space-template_sbref': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "sbref", + xfm, + reference="T1w-brain-template-funcreg", + time_series=False, + )[:2] + outputs = {"space-template_sbref": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4659,15 +5094,22 @@ def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-funcreg", ], outputs={ - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}}) + "space-template_desc-bold_mask": {"Template": "T1w-brain-template-funcreg"} 
+ }, +) def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-template_mode-image_xfm' + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - reference='T1w-brain-template-funcreg', time_series=False)[:2] - outputs = {'space-template_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + reference="T1w-brain-template-funcreg", + time_series=False, + )[:2] + outputs = {"space-template_desc-bold_mask": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4701,15 +5143,26 @@ def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Transform the BOLD mask to template space and to the resolution set for + """Transform the BOLD mask to template space and to the resolution set for the derivative outputs. - ''' - xfm = 'from-bold_to-template_mode-image_xfm' + """ + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - reference='T1w-brain-template-deriv', time_series=False)[:2] - outputs = {'space-template_res-derivative_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + reference="T1w-brain-template-deriv", + time_series=False, + )[:2] + outputs = { + "space-template_res-derivative_desc-bold_mask": ( + apply_xfm, + "outputspec.output_image", + ) + } return _warp_return(wf, apply_xfm, outputs) @@ -4728,12 +5181,11 @@ def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-preproc_bold": {"Template": "EPI-template"}}, ) def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm, resource = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'desc-preproc_bold', xfm, - time_series=True) - outputs = {f'space-template_{resource}': - (apply_xfm, 'outputspec.output_image')} + wf, cfg, strat_pool, pipe_num, "desc-preproc_bold", xfm, time_series=True + ) + outputs = {f"space-template_{resource}": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4752,12 +5204,11 @@ def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-mean_bold": {"Template": "EPI-template"}}, ) def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'desc-mean_bold', xfm, - time_series=False)[:2] - outputs = {'space-template_desc-mean_bold': - (apply_xfm, 'outputspec.output_image')} + wf, cfg, strat_pool, pipe_num, "desc-mean_bold", xfm, time_series=False + )[:2] + outputs = {"space-template_desc-mean_bold": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4770,19 +5221,23 @@ def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): ], switch=["run_EPI"], inputs=[ - ("space-bold_desc-brain_mask", - "from-bold_to-EPItemplate_mode-image_xfm"), + 
("space-bold_desc-brain_mask", "from-bold_to-EPItemplate_mode-image_xfm"), "EPI-template", ], outputs={"space-template_desc-bold_mask": {"Template": "EPI-template"}}, ) def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - time_series=False)[:2] - outputs = {'space-template_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + time_series=False, + )[:2] + outputs = {"space-template_desc-bold_mask": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4795,25 +5250,33 @@ def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): ], switch=["run_EPI"], inputs=[ - ("space-bold_desc-brain_mask", - "from-bold_to-EPItemplate_mode-image_xfm"), + ("space-bold_desc-brain_mask", "from-bold_to-EPItemplate_mode-image_xfm"), "EPI-template", ], outputs={ - "space-template_res-derivative_desc-bold_mask": { - "Template": "EPI-template"} + "space-template_res-derivative_desc-bold_mask": {"Template": "EPI-template"} }, ) def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - '''Transform the BOLD mask to template space and to the resolution set for + """Transform the BOLD mask to template space and to the resolution set for the derivative outputs. - ''' - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + """ + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - time_series=False)[:2] - outputs = {'space-template_res-derivative_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + time_series=False, + )[:2] + outputs = { + "space-template_res-derivative_desc-bold_mask": ( + apply_xfm, + "outputspec.output_image", + ) + } return _warp_return(wf, apply_xfm, outputs) @@ -4836,9 +5299,14 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - return warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, - xfm='from-T1w_to-template_mode-image_' - 'xfm', template_space='T1') + return warp_tissuemask_to_template( + wf, + cfg, + strat_pool, + pipe_num, + xfm="from-T1w_to-template_mode-image_" "xfm", + template_space="T1", + ) @nodeblock( @@ -4865,15 +5333,18 @@ def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - return warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, - xfm='from-bold_to-EPItemplate_' - 'mode-image_xfm', - template_space='EPI') + return warp_tissuemask_to_template( + wf, + cfg, + strat_pool, + pipe_num, + xfm="from-bold_to-EPItemplate_" "mode-image_xfm", + template_space="EPI", + ) -def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, - template_space): - '''Function to apply transforms to tissue masks +def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_space): + """Function to apply transforms to tissue masks Parameters ---------- @@ -4891,27 +5362,42 @@ def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, wf : 
nipype.pipeline.engine.workflows.Workflow outputs : dict - ''' - tissue_types = ['CSF', 'WM', 'GM'] + """ + tissue_types = ["CSF", "WM", "GM"] apply_xfm = {} for tissue in tissue_types: wf, apply_xfm[tissue] = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, f'label-{tissue}_mask', xfm, - time_series=False)[:2] - if template_space == 'T1': - template_space = '' - outputs = {f'space-{template_space}template_label-{tissue}_mask': ( - apply_xfm[tissue], 'outputspec.output_image') for - tissue in tissue_types} + wf, + cfg, + strat_pool, + pipe_num, + f"label-{tissue}_mask", + xfm, + time_series=False, + )[:2] + if template_space == "T1": + template_space = "" + outputs = { + f"space-{template_space}template_label-{tissue}_mask": ( + apply_xfm[tissue], + "outputspec.output_image", + ) + for tissue in tissue_types + } return _warp_return(wf, apply_xfm, outputs) -def warp_resource_to_template(wf: pe.Workflow, cfg, strat_pool, pipe_num: int, - input_resource: LIST_OR_STR, xfm: str, - reference: Optional[str] = None, - time_series: Optional[bool] = False - ) -> TUPLE[pe.Workflow, pe.Workflow, str]: - '''Function to warp a resource into a template space +def warp_resource_to_template( + wf: pe.Workflow, + cfg, + strat_pool, + pipe_num: int, + input_resource: LIST_OR_STR, + xfm: str, + reference: Optional[str] = None, + time_series: Optional[bool] = False, +) -> TUPLE[pe.Workflow, pe.Workflow, str]: + """Function to warp a resource into a template space Parameters ---------- @@ -4946,53 +5432,58 @@ def warp_resource_to_template(wf: pe.Workflow, cfg, strat_pool, pipe_num: int, resource : str key of input resource in strat_pool - ''' + """ # determine space we're warping to - template_space = xfm.split('_to-', 1)[1].split('template')[0] - if template_space == '': - template_space = 'T1w' + template_space = xfm.split("_to-", 1)[1].split("template")[0] + if template_space == "": + template_space = "T1w" # determine tool used for registration xfm_prov = strat_pool.get_cpac_provenance(xfm) reg_tool = check_prov_for_regtool(xfm_prov) # set 'resource' if strat_pool.check_rpool(input_resource): - resource, input_resource = strat_pool.get_data(input_resource, - report_fetched=True) + resource, input_resource = strat_pool.get_data( + input_resource, report_fetched=True + ) else: return wf, None, input_resource # set 'reference' if not passed and determine subworkflow name if reference is None: subwf_input_name = input_resource - reference = f'{template_space}-template' + reference = f"{template_space}-template" else: - subwf_input_name = '-'.join([ - reference.split('-')[-1].split('_')[-1], - input_resource.split('-')[-1].split('_')[-1]]) + subwf_input_name = "-".join( + [ + reference.split("-")[-1].split("_")[-1], + input_resource.split("-")[-1].split("_")[-1], + ] + ) # set up 'apply_transform' subworkflow - apply_xfm = apply_transform(f'warp_{subwf_input_name}_to_' - f'{template_space}template_{pipe_num}', - reg_tool, time_series=time_series, - num_cpus=cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'], - num_ants_cores=cfg.pipeline_setup[ - 'system_config']['num_ants_threads']) + apply_xfm = apply_transform( + f"warp_{subwf_input_name}_to_" f"{template_space}template_{pipe_num}", + reg_tool, + time_series=time_series, + num_cpus=cfg.pipeline_setup["system_config"]["max_cores_per_participant"], + num_ants_cores=cfg.pipeline_setup["system_config"]["num_ants_threads"], + ) # set appropriate 'interpolation' input based on registration tool - if reg_tool == 'ants': - 
apply_xfm.inputs.inputspec.interpolation = 'NearestNeighbor' - elif reg_tool == 'fsl': - apply_xfm.inputs.inputspec.interpolation = 'nn' + if reg_tool == "ants": + apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" + elif reg_tool == "fsl": + apply_xfm.inputs.inputspec.interpolation = "nn" # connect nodes to subworkflow node, out = resource - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data(reference) - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data(xfm) - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") return wf, apply_xfm, input_resource -def _warp_return(wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], - outputs: dict) -> TUPLE[pe.Workflow, dict]: +def _warp_return( + wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], outputs: dict +) -> TUPLE[pe.Workflow, dict]: """Check if we have a transform to apply; if not, don't add the outputs""" if apply_xfm is None: return wf, {} diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index b0a1000499..0bdf2f678b 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -1,111 +1,159 @@ import os + from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy + def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=['file']), name='file_node_{0}'.format(file_node_num) + util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" ) input_node.inputs.file = path - return input_node, 'file' + return input_node, "file" + -def configuration_strategy_mock( method = 'FSL' ): +def configuration_strategy_mock(method="FSL"): fsldir = os.environ.get("FSLDIR") # mock the config dictionary - c = Configuration({ - "num_ants_threads": 4, - "workingDirectory": "/scratch/pipeline_tests", - "crashLogDirectory": "/scratch", - "outputDirectory": "/output/output/pipeline_analysis_nuisance/sub-M10978008_ses-NFB3", - "resolution_for_func_preproc": "3mm", - "resolution_for_func_derivative": "3mm", - "template_for_resample": f"{fsldir}/data/standard/" - "MNI152_T1_1mm_brain.nii.gz", - "template_brain_only_for_func": f"{fsldir}/data/standard/" - r"MNI152_T1_${func_resolution}_" - "brain.nii.gz", - "template_skull_for_func": f"{fsldir}/data/standard/" - r"MNI152_T1_${func_resolution}.nii.gz", - "identityMatrix": f"{fsldir}/etc/flirtsch/ident.mat", - "funcRegFSLinterpolation": "sinc", - "funcRegANTSinterpolation": "LanczosWindowedSinc" - }) - - if method == 'ANTS': - c.update('regOption', 'ANTS') + c = Configuration( + { + "num_ants_threads": 4, + "workingDirectory": "/scratch/pipeline_tests", + "crashLogDirectory": "/scratch", + "outputDirectory": "/output/output/pipeline_analysis_nuisance/sub-M10978008_ses-NFB3", + "resolution_for_func_preproc": "3mm", + "resolution_for_func_derivative": "3mm", + "template_for_resample": f"{fsldir}/data/standard/" + "MNI152_T1_1mm_brain.nii.gz", + "template_brain_only_for_func": f"{fsldir}/data/standard/" + r"MNI152_T1_${func_resolution}_" + "brain.nii.gz", + "template_skull_for_func": f"{fsldir}/data/standard/" + 
r"MNI152_T1_${func_resolution}.nii.gz", + "identityMatrix": f"{fsldir}/etc/flirtsch/ident.mat", + "funcRegFSLinterpolation": "sinc", + "funcRegANTSinterpolation": "LanczosWindowedSinc", + } + ) + + if method == "ANTS": + c.update("regOption", "ANTS") else: - c.update('regOption', 'FSL') + c.update("regOption", "FSL") # mock the strategy strat = Strategy() resource_dict = { - "mean_functional": os.path.join(c.outputDirectory, - "mean_functional/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat.nii.gz"), - "motion_correct": os.path.join(c.outputDirectory, - "motion_correct/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg.nii.gz"), - "anatomical_brain": os.path.join(c.outputDirectory, - "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz"), - "ants_initial_xfm": os.path.join(c.outputDirectory, - "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat"), - "ants_affine_xfm": os.path.join(c.outputDirectory, - "ants_affine_xfm/transform2Affine.mat"), - "ants_rigid_xfm": os.path.join(c.outputDirectory, - "ants_rigid_xfm/transform1Rigid.mat"), - "anatomical_to_mni_linear_xfm": os.path.join(c.outputDirectory, - "anatomical_to_mni_linear_xfm/sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat"), - "functional_to_anat_linear_xfm": os.path.join(c.outputDirectory, - "functional_to_anat_linear_xfm/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_flirt.mat"), - 'ants_symm_warp_field': os.path.join(c.outputDirectory, - "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz"), - 'ants_symm_affine_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_affine_xfm/transform2Affine.mat"), - 'ants_symm_rigid_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_rigid_xfm/transform1Rigid.mat"), - 'ants_symm_initial_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_initial_xfm/transform0DerivedInitialMovingTranslation.mat"), - "dr_tempreg_maps_files": [os.path.join('/scratch', 'resting_preproc_sub-M10978008_ses-NFB3_cpac105', 'temporal_dual_regression_0/_scan_test/_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_..cpac_templates..PNAS_Smith09_rsn10.nii.gz/split_raw_volumes/temp_reg_map_000{0}.nii.gz'.format(n)) for n in range(10)] + "mean_functional": os.path.join( + c.outputDirectory, + "mean_functional/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat.nii.gz", + ), + "motion_correct": os.path.join( + c.outputDirectory, + "motion_correct/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg.nii.gz", + ), + "anatomical_brain": os.path.join( + c.outputDirectory, + "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + ), + "ants_initial_xfm": os.path.join( + c.outputDirectory, + "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat", + ), + "ants_affine_xfm": os.path.join( + c.outputDirectory, "ants_affine_xfm/transform2Affine.mat" + ), + "ants_rigid_xfm": os.path.join( + c.outputDirectory, "ants_rigid_xfm/transform1Rigid.mat" + ), + "anatomical_to_mni_linear_xfm": os.path.join( + c.outputDirectory, + "anatomical_to_mni_linear_xfm/sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat", + ), + "functional_to_anat_linear_xfm": os.path.join( + c.outputDirectory, + "functional_to_anat_linear_xfm/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_flirt.mat", + ), + "ants_symm_warp_field": os.path.join( + c.outputDirectory, 
+ "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz", + ), + "ants_symm_affine_xfm": os.path.join( + c.outputDirectory, "ants_symmetric_affine_xfm/transform2Affine.mat" + ), + "ants_symm_rigid_xfm": os.path.join( + c.outputDirectory, "ants_symmetric_rigid_xfm/transform1Rigid.mat" + ), + "ants_symm_initial_xfm": os.path.join( + c.outputDirectory, + "ants_symmetric_initial_xfm/transform0DerivedInitialMovingTranslation.mat", + ), + "dr_tempreg_maps_files": [ + os.path.join( + "/scratch", + "resting_preproc_sub-M10978008_ses-NFB3_cpac105", + f"temporal_dual_regression_0/_scan_test/_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_..cpac_templates..PNAS_Smith09_rsn10.nii.gz/split_raw_volumes/temp_reg_map_000{n}.nii.gz", + ) + for n in range(10) + ], } - if method == 'ANTS': - resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join(c.outputDirectory, - "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz") + if method == "ANTS": + resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( + c.outputDirectory, "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz" + ) else: - resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join(c.outputDirectory, - "anatomical_to_mni_nonlinear_xfm/sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz") - + resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( + c.outputDirectory, + "anatomical_to_mni_nonlinear_xfm/sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz", + ) + file_node_num = 0 for resource, filepath in resource_dict.items(): - strat.update_resource_pool({ - resource: file_node(filepath, file_node_num) - }) - strat.append_name(resource+'_0') + strat.update_resource_pool({resource: file_node(filepath, file_node_num)}) + strat.append_name(resource + "_0") file_node_num += 1 templates_for_resampling = [ - (c.resolution_for_func_preproc, c.template_brain_only_for_func, - 'template_brain_for_func_preproc', 'resolution_for_func_preproc'), - (c.resolution_for_func_preproc, c.template_brain_only_for_func, - 'template_skull_for_func_preproc', 'resolution_for_func_preproc') + ( + c.resolution_for_func_preproc, + c.template_brain_only_for_func, + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + c.resolution_for_func_preproc, + c.template_brain_only_for_func, + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function(input_names = ['resolution', 'template', 'template_name', 'tag'], - output_names = ['resampled_template'], - function = resolve_resolution, - as_module = True), - name = 'resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat.update_resource_pool({template_name: (resampled_template, 'resampled_template')}) - strat.append_name('resampled_template_0') + strat.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) + strat.append_name("resampled_template_0") return c, strat diff --git a/CPAC/registration/tests/test_registration.py 
diff --git a/CPAC/registration/tests/test_registration.py b/CPAC/registration/tests/test_registration.py
index 430e71ef7b..4b8edea0cd 100755
--- a/CPAC/registration/tests/test_registration.py
+++ b/CPAC/registration/tests/test_registration.py
@@ -1,13 +1,13 @@
 import pytest

-@pytest.mark.skip(reason='needs refactoring')
+@pytest.mark.skip(reason="needs refactoring")
 def test_nonlinear_register():
-    from ..registration import create_nonlinear_register
-    
+    from nipype.interfaces import fsl
+
     from CPAC.pipeline import nipype_pipeline_engine as pe
-    import nipype.interfaces.fsl as fsl
-    
+
+    from ..registration import create_nonlinear_register
+
     ## necessary inputs
     ## -input_brain
     ## -input_skull
@@ -15,146 +15,144 @@ def test_nonlinear_register():
     ## -reference_skull
     ## -fnirt_config
     ## -fnirt_warp_res
-    
+
     ## input_brain
-    anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz'
-    
+    anat_bet_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz"
+
     ## input_skull
-    
+
     ## reference_brain
-    mni_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz'
-    
+    mni_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz"
+
     ## reference_skull
-    
+
     ## fnirt_config
-    fnirt_config = 'T1_2_MNI152_3mm'
-    
+    fnirt_config = "T1_2_MNI152_3mm"
+
     ## fnirt_warp_res
     fnirt_warp_res = None
-    
-    #?? what is this for?:
-    func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz'
-    
-    
-    mni_workflow = pe.Workflow(name='mni_workflow')
-    
-    linear_reg = pe.Node(interface=fsl.FLIRT(),
-                         name='linear_reg_0')
-    linear_reg.inputs.cost = 'corratio'
+
+    # ?? 
what is this for?: + func_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" + + mni_workflow = pe.Workflow(name="mni_workflow") + + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg_0") + linear_reg.inputs.cost = "corratio" linear_reg.inputs.dof = 6 - linear_reg.inputs.interp = 'nearestneighbour' - + linear_reg.inputs.interp = "nearestneighbour" + linear_reg.inputs.in_file = func_file linear_reg.inputs.reference = anat_bet_file - - #T1 to MNI Node + + # T1 to MNI Node c = create_nonlinear_register() c.inputs.inputspec.input = anat_bet_file - c.inputs.inputspec.reference = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' - c.inputs.inputspec.fnirt_config = 'T1_2_MNI152_3mm' - - #EPI to MNI warp Node - mni_warp = pe.Node(interface=fsl.ApplyWarp(), - name='mni_warp') - mni_warp.inputs.ref_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' + c.inputs.inputspec.reference = ( + "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + ) + c.inputs.inputspec.fnirt_config = "T1_2_MNI152_3mm" + + # EPI to MNI warp Node + mni_warp = pe.Node(interface=fsl.ApplyWarp(), name="mni_warp") + mni_warp.inputs.ref_file = ( + "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + ) mni_warp.inputs.in_file = func_file - mni_workflow.connect(c, 'outputspec.nonlinear_xfm', - mni_warp, 'field_file') - mni_workflow.connect(linear_reg, 'out_matrix_file', - mni_warp, 'premat') - - mni_workflow.base_dir = './' - mni_workflow.run() + mni_workflow.connect(c, "outputspec.nonlinear_xfm", mni_warp, "field_file") + mni_workflow.connect(linear_reg, "out_matrix_file", mni_warp, "premat") + + mni_workflow.base_dir = "./" + mni_workflow.run() -@pytest.mark.skip(reason='needs refactoring') +@pytest.mark.skip(reason="needs refactoring") def test_registration(): - from ..registration import create_nonlinear_register - from ..registration import create_register_func_to_mni - from CPAC.pipeline import nipype_pipeline_engine as pe - import nipype.interfaces.fsl as fsl - - func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz' - anat_skull_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_reorient/mprage_anonymized_RPI.nii.gz' - anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz' - mni_brain_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' - mni_skull_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm.nii.gz' - - - mni_workflow = pe.Workflow(name='mni_workflow') - + from ..registration import create_nonlinear_register, create_register_func_to_mni + + func_file = 
"/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" + anat_skull_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_reorient/mprage_anonymized_RPI.nii.gz" + anat_bet_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz" + mni_brain_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + mni_skull_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm.nii.gz" + + mni_workflow = pe.Workflow(name="mni_workflow") + nr = create_nonlinear_register() nr.inputs.inputspec.input_brain = anat_bet_file nr.inputs.inputspec.input_skull = anat_skull_file nr.inputs.inputspec.reference_brain = mni_brain_file nr.inputs.inputspec.reference_skull = mni_skull_file - nr.inputs.inputspec.fnirt_config = '/usr/share/fsl/4.1/etc/flirtsch/T1_2_MNI152_3mm.cnf' + nr.inputs.inputspec.fnirt_config = ( + "/usr/share/fsl/4.1/etc/flirtsch/T1_2_MNI152_3mm.cnf" + ) func2mni = create_register_func_to_mni() func2mni.inputs.inputspec.func = func_file func2mni.inputs.inputspec.mni = mni_brain_file func2mni.inputs.inputspec.anat = anat_bet_file - - mni_workflow.connect(nr, 'outputspec.nonlinear_xfm', - func2mni, 'inputspec.anat_to_mni_xfm') - mni_workflow.base_dir = './mni_05676_3' + + mni_workflow.connect( + nr, "outputspec.nonlinear_xfm", func2mni, "inputspec.anat_to_mni_xfm" + ) + mni_workflow.base_dir = "./mni_05676_3" mni_workflow.run() -@pytest.mark.skip(reason='needs refactoring') +@pytest.mark.skip(reason="needs refactoring") def test_registration_lesion(): import os - from CPAC.pipeline import nipype_pipeline_engine as pe - from ..registration import create_wf_calculate_ants_warp + from CPAC.anat_preproc.anat_preproc import create_anat_preproc from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc + from CPAC.pipeline import nipype_pipeline_engine as pe + from ..registration import create_wf_calculate_ants_warp # Skull stripped anat image - anat_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w.nii.gz' - lesion_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w_lesion-mask.nii.gz' - mni_brain_file = (f'{os.environ.get("FSLDIR")}/data/standard/' - 'MNI152_T1_3mm_brain.nii.gz') + anat_file = ( + "/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w.nii.gz" + ) + lesion_file = "/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w_lesion-mask.nii.gz" + mni_brain_file = ( + f'{os.environ.get("FSLDIR")}/data/standard/' 'MNI152_T1_3mm_brain.nii.gz' + ) if not os.path.exists(anat_file): - raise IOError(anat_file + ' not found') + raise IOError(anat_file + " not found") if not os.path.exists(lesion_file): - raise IOError(lesion_file + ' not found') + raise IOError(lesion_file + " not found") if not os.path.exists(mni_brain_file): - raise IOError(mni_brain_file + ' not found') + raise IOError(mni_brain_file + " not found") - wf = pe.Workflow(name='test_reg_lesion') + wf = pe.Workflow(name="test_reg_lesion") - anat_preproc = create_anat_preproc(method='mask', - already_skullstripped=True, - wf_name='anat_preproc') + anat_preproc 
= create_anat_preproc( + method="mask", already_skullstripped=True, wf_name="anat_preproc" + ) anat_preproc.inputs.inputspec.anat = anat_file - lesion_preproc = create_lesion_preproc( - wf_name='lesion_preproc' - ) + lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc") lesion_preproc.inputs.inputspec.lesion = lesion_file - ants_reg_anat_mni = \ - create_wf_calculate_ants_warp( - 'anat_mni_ants_register', - 0, - num_threads=4 - ) + ants_reg_anat_mni = create_wf_calculate_ants_warp( + "anat_mni_ants_register", 0, num_threads=4 + ) # pass the reference file ants_reg_anat_mni.inputs.inputspec.reference_brain = mni_brain_file wf.connect( - anat_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.moving_brain' + anat_preproc, "outputspec.reorient", ants_reg_anat_mni, "inputspec.moving_brain" ) wf.connect( - lesion_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.fixed_image_mask' + lesion_preproc, + "outputspec.reorient", + ants_reg_anat_mni, + "inputspec.fixed_image_mask", ) ants_reg_anat_mni.inputs.inputspec.set( @@ -162,30 +160,22 @@ def test_registration_lesion(): use_histogram_matching=True, winsorize_lower_quantile=0.01, winsorize_upper_quantile=0.99, - metric=['MI', 'MI', 'CC'], + metric=["MI", "MI", "CC"], metric_weight=[1, 1, 1], radius_or_number_of_bins=[32, 32, 4], - sampling_strategy=['Regular', 'Regular', None], + sampling_strategy=["Regular", "Regular", None], sampling_percentage=[0.25, 0.25, None], number_of_iterations=[ [1000, 500, 250, 100], [1000, 500, 250, 100], - [100, 100, 70, 20] + [100, 100, 70, 20], ], convergence_threshold=[1e-8, 1e-8, 1e-9], convergence_window_size=[10, 10, 15], - transforms=['Rigid', 'Affine', 'SyN'], + transforms=["Rigid", "Affine", "SyN"], transform_parameters=[[0.1], [0.1], [0.1, 3, 0]], - shrink_factors=[ - [8, 4, 2, 1], - [8, 4, 2, 1], - [6, 4, 2, 1] - ], - smoothing_sigmas=[ - [3, 2, 1, 0], - [3, 2, 1, 0], - [3, 2, 1, 0] - ] + shrink_factors=[[8, 4, 2, 1], [8, 4, 2, 1], [6, 4, 2, 1]], + smoothing_sigmas=[[3, 2, 1, 0], [3, 2, 1, 0], [3, 2, 1, 0]], ) wf.run() diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 67e1277c79..ea7b4db5b8 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -352,7 +352,8 @@ anatomical_preproc: monkey: Off FSL-BET: - # Swich "On" to crop out neck regions before generating the mask (default: Off). + + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov: Off # Set the threshold value controling the brain vs non-brain voxels, default is 0.5 diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index cb0bca639c..d3008f0c09 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -200,7 +200,7 @@ surface_analysis: # Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives. # If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline, # select those 'Freesurfer-' labeled options further below in anatomical_preproc. 
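# Illustrative sketch (not part of this diff): how the surface_analysis
# switches touched below might sit alongside a pre-computed FreeSurfer
# directory in a user pipeline config. The exact nesting of freesurfer_dir
# under pipeline_setup is an assumption based on the option this patch
# series introduces; the path is a placeholder.
#
# pipeline_setup:
#   freesurfer_dir: /outputs/freesurfer   # hypothetical pre-run recon-all dir
# surface_analysis:
#   freesurfer:
#     run_reconall: Off                   # reuse ingressed outputs instead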
- freesurfer: + freesurfer: run_reconall: Off @@ -212,7 +212,7 @@ surface_analysis: # Run ABCD-HCP post FreeSurfer and fMRISurface pipeline - post_freesurfer: + post_freesurfer: run: Off @@ -280,7 +280,7 @@ anatomical_preproc: run_t2: Off # Non-local means filtering via ANTs DenoiseImage - non_local_means_filtering: + non_local_means_filtering: # this is a fork option run: [Off] @@ -298,12 +298,12 @@ anatomical_preproc: shrink_factor: 2 # Bias field correction based on square root of T1w * T2w - t1t2_bias_field_correction: + t1t2_bias_field_correction: + + run: Off - run: Off - BiasFieldSmoothingSigma: 5 - + acpc_alignment: run: Off @@ -316,17 +316,17 @@ anatomical_preproc: # Default: 150mm for human data. brain_size: 150 - # Choose a tool to crop the FOV in ACPC alignment. - # Using FSL's robustfov or flirt command. - # Default: robustfov for human data, flirt for monkey data. + # Choose a tool to crop the FOV in ACPC alignment. + # Using FSL's robustfov or flirt command. + # Default: robustfov for human data, flirt for monkey data. FOV_crop: robustfov - + # ACPC Target # options: 'brain' or 'whole-head' # note: 'brain' requires T1w_brain_ACPC_template below to be populated acpc_target: 'whole-head' - # Run ACPC alignment on brain mask + # Run ACPC alignment on brain mask # If the brain mask is in native space, turn it on # If the brain mask is ACPC aligned, turn it off align_brain_mask: Off @@ -338,7 +338,7 @@ anatomical_preproc: T2w_brain_ACPC_template: None brain_extraction: - + run: On # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] @@ -412,9 +412,9 @@ anatomical_preproc: monkey: False FSL-BET: - # Switch "On" to crop out neck regions before generating the mask (default: Off). + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov : Off - + # Set the threshold value controling the brain vs non-brain voxels, default is 0.5 frac: 0.5 @@ -715,21 +715,21 @@ registration_workflows: fnirt_config: T1_2_MNI152_2mm # The resolution to which anatomical images should be transformed during registration. - # This is the resolution at which processed anatomical files will be output. + # This is the resolution at which processed anatomical files will be output. # specifically for monkey pipeline ref_resolution: 2mm # Reference mask for FSL registration. ref_mask: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain_mask_dil.nii.gz - + # Template to be used during registration. - # It is for monkey pipeline specifically. + # It is for monkey pipeline specifically. FNIRT_T1w_brain_template: None # Template to be used during registration. - # It is for monkey pipeline specifically. + # It is for monkey pipeline specifically. FNIRT_T1w_template: None - + # Interpolation method for writing out transformed anatomical images. 
# Possible values: trilinear, sinc, spline interpolation: sinc @@ -922,10 +922,10 @@ registration_workflows: # these options modify the application (to the functional data), not the calculation, of the # T1-to-template and EPI-to-template transforms calculated earlier during registration - + # apply the functional-to-template (T1 template) registration transform to the functional data run: On - + # apply the functional-to-template (EPI template) registration transform to the functional data run_EPI: Off @@ -946,7 +946,7 @@ registration_workflows: # thus, a higher resolution may not result in a large increase in RAM needs as above func_derivative_outputs: 3mm - target_template: + target_template: # choose which template space to transform derivatives towards # using: ['T1_template', 'EPI_template'] # this is a fork point @@ -1014,11 +1014,11 @@ functional_preproc: run: On - update_header: + update_header: # Convert raw data from LPI to RPI run: On - + truncation: # First timepoint to include in analysis. @@ -1063,10 +1063,10 @@ functional_preproc: tzero: None motion_estimates_and_correction: - + run: On - motion_estimates: + motion_estimates: # calculate motion statistics BEFORE slice-timing correction calculate_motion_first: Off @@ -1206,48 +1206,48 @@ functional_preproc: # Set the threshold value for the skull-stripping of the magnitude file. Depending on the data, a tighter extraction may be necessary in order to prevent noisy voxels from interfering with preparing the field map. # The default value is 0.6. fmap_skullstrip_AFNI_threshold: 0.6 - + Blip-FSL-TOPUP: - + # (approximate) resolution (in mm) of warp basis for the different sub-sampling levels, default 10 warpres: 10 - + # sub-sampling scheme, default 1 subsamp: 1 - + # FWHM (in mm) of gaussian smoothing kernel, default 8 fwhm: 8 - + # Max # of non-linear iterations, default 5 miter: 5 - + # Weight of regularisation, default depending on --ssqlambda and --regmod switches. See user documentation. lambda: 1 - + # If set (=1), lambda is weighted by current ssq, default 1 ssqlambda: 1 - + # Model for regularisation of warp-field [membrane_energy bending_energy], default bending_energy regmod: bending_energy - + # Estimate movements if set, default 1 (true) estmov: 1 - + # Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient, default 0 (LM) minmet: 0 - + # Order of spline, 2->Qadratic spline, 3->Cubic spline. Default=3 splineorder: 3 - + # Precision for representing Hessian, double or float. Default double numprec: double - + # Image interpolation model, linear or spline. Default spline interp: spline - + # If set (=1), the images are individually scaled to a common mean, default 0 (false) scale: 0 - + # If set (=1), the calculations are done in a different grid, default 1 (true) regrid: 1 @@ -1272,7 +1272,7 @@ functional_preproc: functional_mean_boolean: Off # Set an intensity threshold to improve skull stripping performances of FSL BET on rodent scans. - functional_mean_thr: + functional_mean_thr: run: Off threshold_value: 98 @@ -1340,7 +1340,7 @@ functional_preproc: # Normalize functional image run: On - + coreg_prep: # Generate sbref diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index 40a598cfab..e73a4d7bc0 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -15,28 +15,37 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
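# Illustrative usage sketch (not part of this diff): the parametrized check
# in this file can be exercised for a single packaged pipeline with pytest's
# -k selector, e.g.
#
#     pytest CPAC/resources/tests/test_templates.py -k "default"
#
# Treating "default" as one of the names in ALL_PIPELINE_CONFIGS is an
# assumption for the example.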
"""Tests for packaged templates""" + import os + import pytest + from CPAC.pipeline import ALL_PIPELINE_CONFIGS from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool from CPAC.utils.configuration import Preconfiguration from CPAC.utils.datasource import get_highest_local_res -@pytest.mark.parametrize('pipeline', ALL_PIPELINE_CONFIGS) +@pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS) def test_packaged_path_exists(pipeline): """ Check that all local templates are included in image at at least one resolution """ - rpool = ingress_pipeconfig_paths(Preconfiguration(pipeline), - ResourcePool(), 'pytest') + rpool = ingress_pipeconfig_paths( + Preconfiguration(pipeline), ResourcePool(), "pytest" + ) for resource in rpool.rpool.values(): - node = list(resource.values())[0].get('data')[0] - if hasattr(node.inputs, 'template' - ) and not node.inputs.template.startswith('s3:'): - if not pipeline == 'rodent' and node.inputs.template.startswith( - '/template/study_based'): - assert (os.path.exists(node.inputs.template) or - get_highest_local_res(node.inputs.template, - node.inputs.resolution).exists()) + node = list(resource.values())[0].get("data")[0] + if hasattr(node.inputs, "template") and not node.inputs.template.startswith( + "s3:" + ): + if not pipeline == "rodent" and node.inputs.template.startswith( + "/template/study_based" + ): + assert ( + os.path.exists(node.inputs.template) + or get_highest_local_res( + node.inputs.template, node.inputs.resolution + ).exists() + ) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 0a45ae37b7..439d09ab61 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -16,21 +16,22 @@ # License along with C-PAC. If not, see . import csv import json -import re from pathlib import Path +import re from typing import Union + from nipype import logging from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.resources.templates.lookup_table import format_identifier, \ - lookup_identifier +from CPAC.resources.templates.lookup_table import format_identifier, lookup_identifier from CPAC.utils import function from CPAC.utils.bids_utils import bids_remove_entity from CPAC.utils.interfaces.function import Function from CPAC.utils.typing import TUPLE from CPAC.utils.utils import get_scan_params -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") def bidsier_prefix(unique_id): @@ -56,13 +57,13 @@ def bidsier_prefix(unique_id): >>> bidsier_prefix('01_ses-1') 'sub-01_ses-1' """ - keys = ['sub', 'ses'] - components = unique_id.split('_') + keys = ["sub", "ses"] + components = unique_id.split("_") for i, component in enumerate(components): if i < len(keys): if not component.startswith(keys[i]): - components[i] = '-'.join([keys[i], component]) - return '_'.join(components) + components[i] = "-".join([keys[i], component]) + return "_".join(components) def get_rest(scan, rest_dict, resource="scan"): @@ -99,35 +100,36 @@ def select_model_files(model, ftest, model_name): """ Method to select model files """ - - import os import glob + import os - files = glob.glob(os.path.join(model, '*')) + files = glob.glob(os.path.join(model, "*")) if len(files) == 0: raise Exception("No files found inside directory %s" % model) - fts_file = '' + fts_file = "" for filename in files: - if (model_name + '.mat') in filename: + if (model_name + ".mat") in filename: mat_file = filename - elif (model_name + '.grp') in filename: + elif (model_name 
+ ".grp") in filename: grp_file = filename - elif ((model_name + '.fts') in filename) and ftest: + elif ((model_name + ".fts") in filename) and ftest: fts_file = filename - elif (model_name + '.con') in filename: + elif (model_name + ".con") in filename: con_file = filename - if ftest == True and fts_file == '': - errmsg = "\n[!] CPAC says: You have f-tests included in your group " \ - "analysis model '%s', but no .fts files were found in the " \ - "output folder specified for group analysis: %s.\n\nThe " \ - ".fts file is automatically generated by CPAC, and if you " \ - "are seeing this error, it is because something went wrong " \ - "with the generation of this file, or it has been moved." \ - "\n\n" % (model_name, model) + if ftest == True and fts_file == "": + errmsg = ( + "\n[!] CPAC says: You have f-tests included in your group " + "analysis model '%s', but no .fts files were found in the " + "output folder specified for group analysis: %s.\n\nThe " + ".fts file is automatically generated by CPAC, and if you " + "are seeing this error, it is because something went wrong " + "with the generation of this file, or it has been moved." + "\n\n" % (model_name, model) + ) raise Exception(errmsg) @@ -136,223 +138,252 @@ def select_model_files(model, ftest, model_name): def check_func_scan(func_scan_dct, scan): """Run some checks on the functional timeseries-related files for a given - series/scan name or label.""" - + series/scan name or label. + """ scan_resources = func_scan_dct[scan] try: scan_resources.keys() except AttributeError: - err = "\n[!] The data configuration file you provided is " \ - "missing a level under the 'func:' key. CPAC versions " \ - "1.2 and later use data configurations with an " \ - "additional level of nesting.\n\nExample\nfunc:\n " \ - "rest01:\n scan: /path/to/rest01_func.nii.gz\n" \ - " scan parameters: /path/to/scan_params.json\n\n" \ - "See the User Guide for more information.\n\n" + err = ( + "\n[!] The data configuration file you provided is " + "missing a level under the 'func:' key. CPAC versions " + "1.2 and later use data configurations with an " + "additional level of nesting.\n\nExample\nfunc:\n " + "rest01:\n scan: /path/to/rest01_func.nii.gz\n" + " scan parameters: /path/to/scan_params.json\n\n" + "See the User Guide for more information.\n\n" + ) raise Exception(err) # actual 4D time series file if "scan" not in scan_resources.keys(): - err = "\n\n[!] The {0} scan is missing its actual time-series " \ - "scan file, which should be a filepath labeled with the " \ - "'scan' key.\n\n".format(scan) + err = ( + f"\n\n[!] The {scan} scan is missing its actual time-series " + "scan file, which should be a filepath labeled with the " + "'scan' key.\n\n" + ) raise Exception(err) # Nipype restriction (may have changed) - if '.' in scan or '+' in scan or '*' in scan: - raise Exception('\n\n[!] Scan names cannot contain any special ' - 'characters (., +, *, etc.). Please update this ' - 'and try again.\n\nScan: {0}' - '\n\n'.format(scan)) + if "." in scan or "+" in scan or "*" in scan: + raise Exception( + "\n\n[!] Scan names cannot contain any special " + "characters (., +, *, etc.). 
Please update this " + f"and try again.\n\nScan: {scan}" + "\n\n" + ) -def create_func_datasource(rest_dict, rpool, wf_name='func_datasource'): +def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): """Return the functional timeseries-related file paths for each series/scan, from the dictionary of functional files described in the data configuration (sublist) YAML file. Scan input (from inputnode) is an iterable. """ - from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'scan', 'creds_path', 'dl_dir'], - mandatory_inputs=True), - name='inputnode') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "scan", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputnode", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'rest', - 'scan', 'scan_params', - 'phase_diff', - 'magnitude']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "rest", "scan", "scan_params", "phase_diff", "magnitude"] + ), + name="outputspec", + ) # have this here for now because of the big change in the data - # configuration format + # configuration format # (Not necessary with ingress - format does not comply) - if not rpool.check_rpool('derivatives-dir'): - check_scan = pe.Node(function.Function(input_names=['func_scan_dct', - 'scan'], - output_names=[], - function=check_func_scan, - as_module=True), - name='check_func_scan') + if not rpool.check_rpool("derivatives-dir"): + check_scan = pe.Node( + function.Function( + input_names=["func_scan_dct", "scan"], + output_names=[], + function=check_func_scan, + as_module=True, + ), + name="check_func_scan", + ) check_scan.inputs.func_scan_dct = rest_dict - wf.connect(inputnode, 'scan', check_scan, 'scan') - + wf.connect(inputnode, "scan", check_scan, "scan") # get the functional scan itself - selectrest = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest') + selectrest = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest", + ) selectrest.inputs.rest_dict = rest_dict selectrest.inputs.resource = "scan" - wf.connect(inputnode, 'scan', selectrest, 'scan') + wf.connect(inputnode, "scan", selectrest, "scan") # check to see if it's on an Amazon AWS S3 bucket, and download it, if it # is - otherwise, just return the local file path - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(selectrest, 'file_path', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'func' - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'rest') - wf.connect(inputnode, 'scan', outputnode, 'scan') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + 
name="check_for_s3", + ) + + wf.connect(selectrest, "file_path", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "func" + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "rest") + wf.connect(inputnode, "scan", outputnode, "scan") # scan parameters CSV - select_scan_params = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='select_scan_params') + select_scan_params = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="select_scan_params", + ) select_scan_params.inputs.rest_dict = rest_dict select_scan_params.inputs.resource = "scan_parameters" - wf.connect(inputnode, 'scan', select_scan_params, 'scan') + wf.connect(inputnode, "scan", select_scan_params, "scan") # if the scan parameters file is on AWS S3, download it - s3_scan_params = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='s3_scan_params') - - wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path') - wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path') - wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir') - wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params') + s3_scan_params = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="s3_scan_params", + ) + + wf.connect(select_scan_params, "file_path", s3_scan_params, "file_path") + wf.connect(inputnode, "creds_path", s3_scan_params, "creds_path") + wf.connect(inputnode, "dl_dir", s3_scan_params, "dl_dir") + wf.connect(s3_scan_params, "local_path", outputnode, "scan_params") return wf -def create_fmap_datasource(fmap_dct, wf_name='fmap_datasource'): +def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): """Return the field map files, from the dictionary of functional files described in the data configuration (sublist) YAML file. 
""" + import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe - import nipype.interfaces.utility as util wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'scan', 'creds_path', 'dl_dir'], - mandatory_inputs=True), - name='inputnode') - - outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'rest', - 'scan', 'scan_params', - 'phase_diff', - 'magnitude']), - name='outputspec') - - selectrest = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "scan", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputnode", + ) + + outputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "rest", "scan", "scan_params", "phase_diff", "magnitude"] + ), + name="outputspec", + ) + + selectrest = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest", + ) selectrest.inputs.rest_dict = fmap_dct selectrest.inputs.resource = "scan" - wf.connect(inputnode, 'scan', selectrest, 'scan') + wf.connect(inputnode, "scan", selectrest, "scan") # check to see if it's on an Amazon AWS S3 bucket, and download it, if it # is - otherwise, just return the local file path - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(selectrest, 'file_path', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'other' - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'rest') - wf.connect(inputnode, 'scan', outputnode, 'scan') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) + + wf.connect(selectrest, "file_path", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "other" + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "rest") + wf.connect(inputnode, "scan", outputnode, "scan") # scan parameters CSV - select_scan_params = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='select_scan_params') + select_scan_params = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="select_scan_params", + ) select_scan_params.inputs.rest_dict = fmap_dct select_scan_params.inputs.resource = "scan_parameters" - wf.connect(inputnode, 'scan', select_scan_params, 'scan') + wf.connect(inputnode, "scan", select_scan_params, "scan") # if the scan parameters file is on AWS S3, download it - s3_scan_params = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - 
output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='s3_scan_params') - - wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path') - wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path') - wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir') - wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params') + s3_scan_params = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="s3_scan_params", + ) + + wf.connect(select_scan_params, "file_path", s3_scan_params, "file_path") + wf.connect(inputnode, "creds_path", s3_scan_params, "creds_path") + wf.connect(inputnode, "dl_dir", s3_scan_params, "dl_dir") + wf.connect(s3_scan_params, "local_path", outputnode, "scan_params") return wf def get_fmap_phasediff_metadata(data_config_scan_params): - if (not isinstance(data_config_scan_params, dict) and - ".json" in data_config_scan_params): - with open(data_config_scan_params, 'r', encoding='utf-8') as _f: + if ( + not isinstance(data_config_scan_params, dict) + and ".json" in data_config_scan_params + ): + with open(data_config_scan_params, "r", encoding="utf-8") as _f: data_config_scan_params = json.load(_f) echo_time = None @@ -360,20 +391,30 @@ def get_fmap_phasediff_metadata(data_config_scan_params): echo_time_two = None if "EchoTime" in data_config_scan_params: echo_time = data_config_scan_params.get("EchoTime") - elif "EchoTime1" in data_config_scan_params and "EchoTime2" \ - in data_config_scan_params: + elif ( + "EchoTime1" in data_config_scan_params + and "EchoTime2" in data_config_scan_params + ): echo_time_one = data_config_scan_params.get("EchoTime1") echo_time_two = data_config_scan_params.get("EchoTime2") dwell_time = data_config_scan_params.get("DwellTime") pe_direction = data_config_scan_params.get("PhaseEncodingDirection") total_readout = data_config_scan_params.get("TotalReadoutTime") - return (dwell_time, pe_direction, total_readout, echo_time, - echo_time_one, echo_time_two) + return ( + dwell_time, + pe_direction, + total_readout, + echo_time, + echo_time_one, + echo_time_two, + ) -@Function.sig_imports(['from CPAC.utils.typing import TUPLE']) -def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, - echo_times: list) -> TUPLE[float, float]: + +@Function.sig_imports(["from CPAC.utils.typing import TUPLE"]) +def calc_delta_te_and_asym_ratio( + effective_echo_spacing: float, echo_times: list +) -> TUPLE[float, float]: """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata Parameters @@ -390,9 +431,11 @@ def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, ees_asym_ratio : float """ if not isinstance(effective_echo_spacing, float): - raise LookupError('C-PAC could not find `EffectiveEchoSpacing` in ' - 'either fmap or func sidecar JSON, but that field ' - 'is required for PhaseDiff distortion correction.') + raise LookupError( + "C-PAC could not find `EffectiveEchoSpacing` in " + "either fmap or func sidecar JSON, but that field " + "is required for PhaseDiff distortion correction." 
+ ) # convert into milliseconds if necessary # these values will/should never be more than 10ms @@ -401,7 +444,7 @@ def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, echo_times[1] = echo_times[1] * 1000 deltaTE = abs(echo_times[0] - echo_times[1]) - ees_asym_ratio = (effective_echo_spacing / deltaTE) + ees_asym_ratio = effective_echo_spacing / deltaTE return deltaTE, ees_asym_ratio @@ -410,14 +453,21 @@ def gather_echo_times(echotime_1, echotime_2=None, echotime_3=None, echotime_4=N echotime_list = list(filter(lambda item: item is not None, echotime_list)) echotime_list = list(set(echotime_list)) if len(echotime_list) != 2: - raise Exception("\n[!] Something went wrong with the field map echo " - "times - there should be two distinct values.\n\n" - f"Echo Times:\n{echotime_list}\n") + raise Exception( + "\n[!] Something went wrong with the field map echo " + "times - there should be two distinct values.\n\n" + f"Echo Times:\n{echotime_list}\n" + ) return echotime_list -def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, - epi_fmap_two=None, epi_fmap_params_two=None): +def match_epi_fmaps( + bold_pedir, + epi_fmap_one, + epi_fmap_params_one, + epi_fmap_two=None, + epi_fmap_params_two=None, +): """Parse the field map files in the data configuration and determine which ones have the same and opposite phase-encoding directions as the BOLD scan in the current pipeline. @@ -438,7 +488,6 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, 2. Check whether there are one or two EPI's in the field map data. 3. Grab the one or two EPI field maps. """ - fmap_dct = {epi_fmap_one: epi_fmap_params_one} if epi_fmap_two and epi_fmap_params_two: fmap_dct[epi_fmap_two] = epi_fmap_params_two @@ -449,7 +498,7 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, for epi_scan in fmap_dct.keys(): scan_params = fmap_dct[epi_scan] if not isinstance(scan_params, dict) and ".json" in scan_params: - with open(scan_params, 'r') as f: + with open(scan_params, "r") as f: scan_params = json.load(f) if "PhaseEncodingDirection" in scan_params: epi_pedir = scan_params["PhaseEncodingDirection"] @@ -461,12 +510,20 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, return (opposite_pe_epi, same_pe_epi) -def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, - input_creds_path, unique_id=None, num_strat=None): - name_suffix = '' +def ingress_func_metadata( + wf, + cfg, + rpool, + sub_dict, + subject_id, + input_creds_path, + unique_id=None, + num_strat=None, +): + name_suffix = "" for suffix_part in (unique_id, num_strat): if suffix_part is not None: - name_suffix += f'_{suffix_part}' + name_suffix += f"_{suffix_part}" # Grab field maps diff = False blip = False @@ -476,42 +533,58 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, second = False for key in sub_dict["fmap"]: gather_fmap = create_fmap_datasource( - sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}") + sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}" + ) gather_fmap.inputs.inputnode.set( - subject=subject_id, creds_path=input_creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path']) + subject=subject_id, + creds_path=input_creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) gather_fmap.inputs.inputnode.scan = key orig_key = key - if 'epi' in key and not second: - key = 'epi-1' + if "epi" in key and not second: + key = "epi-1" second = True - elif 'epi' in key and second: - key = 'epi-2' - - rpool.set_data(key, 
gather_fmap, 'outputspec.rest', {}, "", - "fmap_ingress") - rpool.set_data(f'{key}-scan-params', gather_fmap, - 'outputspec.scan_params', {}, "", - "fmap_params_ingress") + elif "epi" in key and second: + key = "epi-2" + + rpool.set_data(key, gather_fmap, "outputspec.rest", {}, "", "fmap_ingress") + rpool.set_data( + f"{key}-scan-params", + gather_fmap, + "outputspec.scan_params", + {}, + "", + "fmap_params_ingress", + ) fmap_rp_list.append(key) - get_fmap_metadata_imports = ['import json'] - get_fmap_metadata = pe.Node(Function( - input_names=['data_config_scan_params'], - output_names=['dwell_time', - 'pe_direction', - 'total_readout', - 'echo_time', - 'echo_time_one', - 'echo_time_two'], - function=get_fmap_phasediff_metadata, - imports=get_fmap_metadata_imports), - name=f'{key}_get_metadata{name_suffix}') - - wf.connect(gather_fmap, 'outputspec.scan_params', - get_fmap_metadata, 'data_config_scan_params') + get_fmap_metadata_imports = ["import json"] + get_fmap_metadata = pe.Node( + Function( + input_names=["data_config_scan_params"], + output_names=[ + "dwell_time", + "pe_direction", + "total_readout", + "echo_time", + "echo_time_one", + "echo_time_two", + ], + function=get_fmap_phasediff_metadata, + imports=get_fmap_metadata_imports, + ), + name=f"{key}_get_metadata{name_suffix}", + ) + + wf.connect( + gather_fmap, + "outputspec.scan_params", + get_fmap_metadata, + "data_config_scan_params", + ) if "phase" in key: # leave it open to all three options, in case there is a @@ -521,233 +594,302 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, # at least one of these rpool keys will have a None value, # which will be sorted out in gather_echo_times below - rpool.set_data(f'{key}-TE', get_fmap_metadata, 'echo_time', - {}, "", "fmap_TE_ingress") + rpool.set_data( + f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) fmap_TE_list.append(f"{key}-TE") - rpool.set_data(f'{key}-TE1', - get_fmap_metadata, 'echo_time_one', - {}, "", "fmap_TE1_ingress") + rpool.set_data( + f"{key}-TE1", + get_fmap_metadata, + "echo_time_one", + {}, + "", + "fmap_TE1_ingress", + ) fmap_TE_list.append(f"{key}-TE1") - rpool.set_data(f'{key}-TE2', - get_fmap_metadata, 'echo_time_two', - {}, "", "fmap_TE2_ingress") + rpool.set_data( + f"{key}-TE2", + get_fmap_metadata, + "echo_time_two", + {}, + "", + "fmap_TE2_ingress", + ) fmap_TE_list.append(f"{key}-TE2") elif "magnitude" in key: - rpool.set_data(f'{key}-TE', get_fmap_metadata, 'echo_time', - {}, "", "fmap_TE_ingress") + rpool.set_data( + f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) fmap_TE_list.append(f"{key}-TE") - rpool.set_data(f'{key}-dwell', get_fmap_metadata, - 'dwell_time', {}, "", "fmap_dwell_ingress") - rpool.set_data(f'{key}-pedir', get_fmap_metadata, - 'pe_direction', {}, "", "fmap_pedir_ingress") - rpool.set_data(f'{key}-total-readout', get_fmap_metadata, - 'total_readout', {}, "", "fmap_readout_ingress") + rpool.set_data( + f"{key}-dwell", + get_fmap_metadata, + "dwell_time", + {}, + "", + "fmap_dwell_ingress", + ) + rpool.set_data( + f"{key}-pedir", + get_fmap_metadata, + "pe_direction", + {}, + "", + "fmap_pedir_ingress", + ) + rpool.set_data( + f"{key}-total-readout", + get_fmap_metadata, + "total_readout", + {}, + "", + "fmap_readout_ingress", + ) - if 'phase' in key or 'mag' in key: + if "phase" in key or "mag" in key: diff = True - if re.match('epi_[AP]{2}', orig_key): + if re.match("epi_[AP]{2}", orig_key): blip = True if diff: - calc_delta_ratio = 
pe.Node(Function( - input_names=['effective_echo_spacing', - 'echo_times'], - output_names=['deltaTE', - 'ees_asym_ratio'], - function=calc_delta_te_and_asym_ratio, - imports=['from typing import Optional, Tuple']), - name=f'diff_distcor_calc_delta{name_suffix}') - - gather_echoes = pe.Node(Function( - input_names=['echotime_1', - 'echotime_2', - 'echotime_3', - 'echotime_4'], - output_names=['echotime_list'], - function=gather_echo_times), - name='fugue_gather_echo_times') + calc_delta_ratio = pe.Node( + Function( + input_names=["effective_echo_spacing", "echo_times"], + output_names=["deltaTE", "ees_asym_ratio"], + function=calc_delta_te_and_asym_ratio, + imports=["from typing import Optional, Tuple"], + ), + name=f"diff_distcor_calc_delta{name_suffix}", + ) + + gather_echoes = pe.Node( + Function( + input_names=[ + "echotime_1", + "echotime_2", + "echotime_3", + "echotime_4", + ], + output_names=["echotime_list"], + function=gather_echo_times, + ), + name="fugue_gather_echo_times", + ) for idx, fmap_file in enumerate(fmap_TE_list, start=1): try: node, out_file = rpool.get(fmap_file)[ - f"['{fmap_file}:fmap_TE_ingress']"]['data'] - wf.connect(node, out_file, gather_echoes, - f'echotime_{idx}') + f"['{fmap_file}:fmap_TE_ingress']" + ]["data"] + wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") except KeyError: pass - wf.connect(gather_echoes, 'echotime_list', - calc_delta_ratio, 'echo_times') + wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") # Add in nodes to get parameters from configuration file # a node which checks if scan_parameters are present for each scan - scan_params = pe.Node(Function( - input_names=['data_config_scan_params', - 'subject_id', - 'scan', - 'pipeconfig_tr', - 'pipeconfig_tpattern', - 'pipeconfig_start_indx', - 'pipeconfig_stop_indx'], - output_names=['tr', - 'tpattern', - 'template', - 'ref_slice', - 'start_indx', - 'stop_indx', - 'pe_direction', - 'effective_echo_spacing'], - function=get_scan_params, - imports=['from CPAC.utils.utils import check, try_fetch_parameter'] - ), name=f"bold_scan_params_{subject_id}{name_suffix}") + scan_params = pe.Node( + Function( + input_names=[ + "data_config_scan_params", + "subject_id", + "scan", + "pipeconfig_tr", + "pipeconfig_tpattern", + "pipeconfig_start_indx", + "pipeconfig_stop_indx", + ], + output_names=[ + "tr", + "tpattern", + "template", + "ref_slice", + "start_indx", + "stop_indx", + "pe_direction", + "effective_echo_spacing", + ], + function=get_scan_params, + imports=["from CPAC.utils.utils import check, try_fetch_parameter"], + ), + name=f"bold_scan_params_{subject_id}{name_suffix}", + ) scan_params.inputs.subject_id = subject_id scan_params.inputs.set( - pipeconfig_start_indx=cfg.functional_preproc['truncation'][ - 'start_tr'], - pipeconfig_stop_indx=cfg.functional_preproc['truncation']['stop_tr']) + pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"], + pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"], + ) - node, out = rpool.get('scan')["['scan:func_ingress']"]['data'] - wf.connect(node, out, scan_params, 'scan') + node, out = rpool.get("scan")["['scan:func_ingress']"]["data"] + wf.connect(node, out, scan_params, "scan") # Workaround for extracting metadata with ingress - if rpool.check_rpool('derivatives-dir'): - selectrest_json = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest_json') + if 
rpool.check_rpool("derivatives-dir"): + selectrest_json = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest_json", + ) selectrest_json.inputs.rest_dict = sub_dict selectrest_json.inputs.resource = "scan_parameters" - wf.connect(node, out, selectrest_json, 'scan') - wf.connect(selectrest_json, 'file_path', scan_params, 'data_config_scan_params') - + wf.connect(node, out, selectrest_json, "scan") + wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params") + else: # wire in the scan parameter workflow - node, out = rpool.get('scan-params')[ - "['scan-params:scan_params_ingress']"]['data'] - wf.connect(node, out, scan_params, 'data_config_scan_params') - - rpool.set_data('TR', scan_params, 'tr', {}, "", "func_metadata_ingress") - rpool.set_data('tpattern', scan_params, 'tpattern', {}, "", - "func_metadata_ingress") - rpool.set_data('template', scan_params, 'template', {}, "", - "func_metadata_ingress") - rpool.set_data('start-tr', scan_params, 'start_indx', {}, "", - "func_metadata_ingress") - rpool.set_data('stop-tr', scan_params, 'stop_indx', {}, "", - "func_metadata_ingress") - rpool.set_data('pe-direction', scan_params, 'pe_direction', {}, "", - "func_metadata_ingress") + node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][ + "data" + ] + wf.connect(node, out, scan_params, "data_config_scan_params") + + rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress") + rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress") + rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress") + rpool.set_data( + "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress" + ) + rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress") + rpool.set_data( + "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress" + ) if diff: # Connect EffectiveEchoSpacing from functional metadata - rpool.set_data('effectiveEchoSpacing', scan_params, - 'effective_echo_spacing', {}, '', - 'func_metadata_ingress') - node, out_file = rpool.get('effectiveEchoSpacing')[ - "['effectiveEchoSpacing:func_metadata_ingress']"]['data'] - wf.connect(node, out_file, calc_delta_ratio, 'effective_echo_spacing') - rpool.set_data('deltaTE', calc_delta_ratio, 'deltaTE', {}, '', - 'deltaTE_ingress') - rpool.set_data('ees-asym-ratio', calc_delta_ratio, - 'ees_asym_ratio', {}, '', - 'ees_asym_ratio_ingress') + rpool.set_data( + "effectiveEchoSpacing", + scan_params, + "effective_echo_spacing", + {}, + "", + "func_metadata_ingress", + ) + node, out_file = rpool.get("effectiveEchoSpacing")[ + "['effectiveEchoSpacing:func_metadata_ingress']" + ]["data"] + wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") + rpool.set_data( + "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" + ) + rpool.set_data( + "ees-asym-ratio", + calc_delta_ratio, + "ees_asym_ratio", + {}, + "", + "ees_asym_ratio_ingress", + ) return wf, rpool, diff, blip, fmap_rp_list def create_general_datasource(wf_name): - from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['unique_id', 'data', 'scan', 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputnode') - - 
check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + inputnode = pe.Node( + util.IdentityInterface( + fields=["unique_id", "data", "scan", "creds_path", "dl_dir"], + mandatory_inputs=True, + ), + name="inputnode", + ) + + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) check_s3_node.inputs.img_type = "other" - wf.connect(inputnode, 'data', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') + wf.connect(inputnode, "data", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") - outputnode = pe.Node(util.IdentityInterface(fields=['unique_id', - 'data', - 'scan']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["unique_id", "data", "scan"]), name="outputspec" + ) - wf.connect(inputnode, 'unique_id', outputnode, 'unique_id') - wf.connect(inputnode, 'scan', outputnode, 'scan') - wf.connect(check_s3_node, 'local_path', outputnode, 'data') + wf.connect(inputnode, "unique_id", outputnode, "unique_id") + wf.connect(inputnode, "scan", outputnode, "scan") + wf.connect(check_s3_node, "local_path", outputnode, "data") return wf -def create_check_for_s3_node(name, file_path, img_type='other', - creds_path=None, dl_dir=None, map_node=False): +def create_check_for_s3_node( + name, file_path, img_type="other", creds_path=None, dl_dir=None, map_node=False +): if map_node: - check_s3_node = pe.MapNode(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=[ - 'local_path'], - function=check_for_s3, - as_module=True), - iterfield=['file_path'], - name='check_for_s3_%s' % name) + check_s3_node = pe.MapNode( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + iterfield=["file_path"], + name="check_for_s3_%s" % name, + ) else: - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3_%s' % name) + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3_%s" % name, + ) check_s3_node.inputs.set( - file_path=file_path, - creds_path=creds_path, - dl_dir=dl_dir, - img_type=img_type + file_path=file_path, creds_path=creds_path, dl_dir=dl_dir, img_type=img_type ) return check_s3_node # Check if passed-in file is on S3 -def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', - verbose=False): +def check_for_s3( + file_path, creds_path=None, dl_dir=None, img_type="other", verbose=False +): # Import packages import os - import nibabel as nib + import botocore.exceptions + import nibabel as nib from indi_aws import fetch_creds # Init variables - s3_str = 's3://' + s3_str = "s3://" if creds_path: - if "None" in creds_path or "none" in creds_path or \ - "null" in creds_path: + if "None" in 
creds_path or "none" in creds_path or "null" in creds_path: creds_path = None if dl_dir is None: @@ -765,14 +907,13 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', return local_path if file_path.lower().startswith(s3_str): - - file_path = s3_str + file_path[len(s3_str):] + file_path = s3_str + file_path[len(s3_str) :] # Get bucket name and bucket object - bucket_name = file_path[len(s3_str):].split('/')[0] + bucket_name = file_path[len(s3_str) :].split("/")[0] # Extract relative key path from bucket and local path s3_prefix = s3_str + bucket_name - s3_key = file_path[len(s3_prefix) + 1:] + s3_key = file_path[len(s3_prefix) + 1 :] local_path = os.path.join(dl_dir, bucket_name, s3_key) # Get local directory and create folders if they dont exist @@ -781,35 +922,41 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', os.makedirs(local_dir, exist_ok=True) if os.path.exists(local_path): - print("{0} already exists- skipping download.".format(local_path)) + print(f"{local_path} already exists- skipping download.") else: # Download file try: bucket = fetch_creds.return_bucket(creds_path, bucket_name) - print("Attempting to download from AWS S3: {0}".format( - file_path)) + print(f"Attempting to download from AWS S3: {file_path}") bucket.download_file(Key=s3_key, Filename=local_path) except botocore.exceptions.ClientError as exc: - error_code = int(exc.response['Error']['Code']) + error_code = int(exc.response["Error"]["Code"]) err_msg = str(exc) if error_code == 403: - err_msg = 'Access to bucket: "%s" is denied; using credentials ' \ - 'in subject list: "%s"; cannot access the file "%s"' \ - % (bucket_name, creds_path, file_path) + err_msg = ( + 'Access to bucket: "%s" is denied; using credentials ' + 'in subject list: "%s"; cannot access the file "%s"' + % (bucket_name, creds_path, file_path) + ) elif error_code == 404: - err_msg = 'File: {0} does not exist; check spelling and try ' \ - 'again'.format( - os.path.join(bucket_name, s3_key)) + err_msg = ( + f"File: {os.path.join(bucket_name, s3_key)} does not exist; check spelling and try " + "again" + ) else: - err_msg = 'Unable to connect to bucket: "%s". Error message:\n%s' \ - % (bucket_name, exc) + err_msg = ( + 'Unable to connect to bucket: "%s". Error message:\n%s' + % (bucket_name, exc) + ) raise Exception(err_msg) except Exception as exc: - err_msg = 'Unable to connect to bucket: "%s". Error message:\n%s' \ - % (bucket_name, exc) + err_msg = 'Unable to connect to bucket: "%s". 
Error message:\n%s' % ( + bucket_name, + exc, + ) raise Exception(err_msg) # Otherwise just return what was passed in, resolving if a link @@ -821,52 +968,61 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', # alert users to 2020-07-20 Neuroparc atlas update (v0 to v1) ndmg_atlases = {} with open( - os.path.join( - os.path.dirname(os.path.dirname(__file__)), - 'resources/templates/ndmg_atlases.csv' - ) + os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "resources/templates/ndmg_atlases.csv", + ) ) as ndmg_atlases_file: - ndmg_atlases['v0'], ndmg_atlases['v1'] = zip(*[( - f'/ndmg_atlases/label/Human/{atlas[0]}', - f'/ndmg_atlases/label/Human/{atlas[1]}' - ) for atlas in - csv.reader( - ndmg_atlases_file)]) - if local_path in ndmg_atlases['v0']: + ndmg_atlases["v0"], ndmg_atlases["v1"] = zip( + *[ + ( + f"/ndmg_atlases/label/Human/{atlas[0]}", + f"/ndmg_atlases/label/Human/{atlas[1]}", + ) + for atlas in csv.reader(ndmg_atlases_file) + ] + ) + if local_path in ndmg_atlases["v0"]: raise FileNotFoundError( - ''.join([ - 'Neuroparc atlas paths were updated on July 20, 2020. ' - 'C-PAC configuration files using Neuroparc v0 atlas paths ' - '(including C-PAC default and preconfigured pipeline ' - 'configurations from v1.6.2a and earlier) need to be ' - 'updated to use Neuroparc atlases. Your current ' - 'configuration includes the Neuroparc v0 path ' - f'{local_path} which needs to be updated to ', - ndmg_atlases['v1'][ndmg_atlases['v0'].index(local_path)], - '. For a full list such paths, see https://fcp-indi.' - 'github.io/docs/nightly/user/ndmg_atlases' - ]) + "".join( + [ + "Neuroparc atlas paths were updated on July 20, 2020. " + "C-PAC configuration files using Neuroparc v0 atlas paths " + "(including C-PAC default and preconfigured pipeline " + "configurations from v1.6.2a and earlier) need to be " + "updated to use Neuroparc atlases. Your current " + "configuration includes the Neuroparc v0 path " + f"{local_path} which needs to be updated to ", + ndmg_atlases["v1"][ndmg_atlases["v0"].index(local_path)], + ". For a full list such paths, see https://fcp-indi." + "github.io/docs/nightly/user/ndmg_atlases", + ] + ) ) else: - raise FileNotFoundError(f'File {local_path} does not exist!') + raise FileNotFoundError(f"File {local_path} does not exist!") if verbose: - print("Downloaded file:\n{0}\n".format(local_path)) + print(f"Downloaded file:\n{local_path}\n") # Check image dimensionality - if local_path.endswith('.nii') or local_path.endswith('.nii.gz'): + if local_path.endswith(".nii") or local_path.endswith(".nii.gz"): img_nii = nib.load(local_path) - if img_type == 'anat': + if img_type == "anat": if len(img_nii.shape) != 3: - raise IOError('File: %s must be an anatomical image with 3 ' - 'dimensions but %d dimensions found!' - % (local_path, len(img_nii.shape))) - elif img_type == 'func': + raise IOError( + "File: %s must be an anatomical image with 3 " + "dimensions but %d dimensions found!" + % (local_path, len(img_nii.shape)) + ) + elif img_type == "func": if len(img_nii.shape) not in [3, 4]: - raise IOError('File: %s must be a functional image with 3 or ' - '4 dimensions but %d dimensions found!' - % (local_path, len(img_nii.shape))) + raise IOError( + "File: %s must be a functional image with 3 or " + "4 dimensions but %d dimensions found!" 
+ % (local_path, len(img_nii.shape)) + ) return local_path @@ -875,41 +1031,38 @@ def gather_extraction_maps(c): ts_analysis_dict = {} sca_analysis_dict = {} - if hasattr(c, 'timeseries_extraction'): - - tsa_roi_dict = c.timeseries_extraction['tse_roi_paths'] + if hasattr(c, "timeseries_extraction"): + tsa_roi_dict = c.timeseries_extraction["tse_roi_paths"] # Timeseries and SCA config selections processing # flip the dictionary for roi_path in tsa_roi_dict.keys(): - ts_analysis_to_run = [ - x.strip() for x in tsa_roi_dict[roi_path].split(",") - ] + ts_analysis_to_run = [x.strip() for x in tsa_roi_dict[roi_path].split(",")] for analysis_type in ts_analysis_to_run: if analysis_type not in ts_analysis_dict.keys(): ts_analysis_dict[analysis_type] = [] ts_analysis_dict[analysis_type].append(roi_path) - if c.timeseries_extraction['run']: - + if c.timeseries_extraction["run"]: if not tsa_roi_dict: - err = "\n\n[!] CPAC says: Time Series Extraction is " \ - "set to run, but no ROI NIFTI file paths were " \ - "provided!\n\n" + err = ( + "\n\n[!] CPAC says: Time Series Extraction is " + "set to run, but no ROI NIFTI file paths were " + "provided!\n\n" + ) raise Exception(err) - if c.seed_based_correlation_analysis['run']: - + if c.seed_based_correlation_analysis["run"]: try: - sca_roi_dict = c.seed_based_correlation_analysis[ - 'sca_roi_paths' - ] + sca_roi_dict = c.seed_based_correlation_analysis["sca_roi_paths"] except KeyError: - err = "\n\n[!] CPAC says: Seed-based Correlation Analysis " \ - "is set to run, but no ROI NIFTI file paths were " \ - "provided!\n\n" + err = ( + "\n\n[!] CPAC says: Seed-based Correlation Analysis " + "is set to run, but no ROI NIFTI file paths were " + "provided!\n\n" + ) raise Exception(err) # flip the dictionary @@ -958,13 +1111,19 @@ def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: LookupError: Could not find template /cpac_templates/dne_T1w_2mm.nii.gz """ from CPAC.pipeline.schema import RESOLUTION_REGEX + if isinstance(template, str): template = Path(template) template_pattern = ( - RESOLUTION_REGEX.replace('^', '').replace('$', '').join([ - re.escape(_part) for _part in template.name.split(tagname, 1)])) - matching_templates = [file for file in template.parent.iterdir() if - re.match(template_pattern, file.name)] + RESOLUTION_REGEX.replace("^", "") + .replace("$", "") + .join([re.escape(_part) for _part in template.name.split(tagname, 1)]) + ) + matching_templates = [ + file + for file in template.parent.iterdir() + if re.match(template_pattern, file.name) + ] matching_templates.sort() try: return matching_templates[0] @@ -987,13 +1146,13 @@ def res_string_to_tuple(resolution): Tuple of floats, e.g. 
(3.438, 3.438, 3.4)
     """
     if "x" in str(resolution):
-        return tuple(
-            float(i.replace('mm', '')) for i in resolution.split("x"))
-    return (float(resolution.replace('mm', '')),) * 3
+        return tuple(float(i.replace("mm", "")) for i in resolution.split("x"))
+    return (float(resolution.replace("mm", "")),) * 3
 
 
 def resolve_resolution(resolution, template, template_name, tag=None):
     from nipype.interfaces import afni
+
     from CPAC.pipeline import nipype_pipeline_engine as pe
     from CPAC.utils.datasource import check_for_s3
 
@@ -1004,8 +1163,7 @@ def resolve_resolution(resolution, template, template_name, tag=None):
         tagname = "${" + tag + "}"
     try:
         if tagname is not None:
-            local_path = check_for_s3(
-                template.replace(tagname, str(resolution)))
+            local_path = check_for_s3(template.replace(tagname, str(resolution)))
     except (IOError, OSError):
         local_path = None
 
@@ -1017,25 +1175,27 @@ def resolve_resolution(resolution, template, template_name, tag=None):
 
     if local_path is None:
         if tagname is not None:
-            if template.startswith('s3:'):
-                ref_template = template.replace(tagname, '1mm')
+            if template.startswith("s3:"):
+                ref_template = template.replace(tagname, "1mm")
                 local_path = check_for_s3(ref_template)
             else:
                 local_path = get_highest_local_res(template, tagname)
-        elif tagname is None and template.startswith('s3:'):
+        elif tagname is None and template.startswith("s3:"):
             local_path = check_for_s3(template)
         else:
             local_path = template
 
-    resample = pe.Node(interface=afni.Resample(),
-                       name=template_name,
-                       mem_gb=0,
-                       mem_x=(0.0115, 'in_file', 't'))
+    resample = pe.Node(
+        interface=afni.Resample(),
+        name=template_name,
+        mem_gb=0,
+        mem_x=(0.0115, "in_file", "t"),
+    )
     resample.inputs.voxel_size = res_string_to_tuple(resolution)
-    resample.inputs.outputtype = 'NIFTI_GZ'
-    resample.inputs.resample_mode = 'Cu'
+    resample.inputs.outputtype = "NIFTI_GZ"
+    resample.inputs.resample_mode = "Cu"
     resample.inputs.in_file = local_path
-    resample.base_dir = '.'
+    resample.base_dir = "."
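+    # Editorial sketch (not part of the original patch): res_string_to_tuple
+    # above converts the configured resolution string into the voxel-size
+    # tuple afni.Resample expects. Given the definition earlier in this diff:
+    #
+    #     res_string_to_tuple("2mm")                -> (2.0, 2.0, 2.0)
+    #     res_string_to_tuple("3.438x3.438x3.4mm")  -> (3.438, 3.438, 3.4)
+    #
+    # so both isotropic and anisotropic grids resolve to a 3-tuple before
+    # the resample node below runs.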
resampled_template = resample.run() local_path = resampled_template.outputs.out_file @@ -1043,77 +1203,82 @@ def resolve_resolution(resolution, template, template_name, tag=None): return local_path -def create_anat_datasource(wf_name='anat_datasource'): - from CPAC.pipeline import nipype_pipeline_engine as pe +def create_anat_datasource(wf_name="anat_datasource"): import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'anat', 'creds_path', - 'dl_dir', 'img_type'], - mandatory_inputs=True), - name='inputnode') - - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(inputnode, 'anat', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - wf.connect(inputnode, 'img_type', check_s3_node, 'img_type') - - outputnode = pe.Node(util.IdentityInterface(fields=['subject', - 'anat']), - name='outputspec') - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'anat') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "anat", "creds_path", "dl_dir", "img_type"], + mandatory_inputs=True, + ), + name="inputnode", + ) + + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) + + wf.connect(inputnode, "anat", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + wf.connect(inputnode, "img_type", check_s3_node, "img_type") + + outputnode = pe.Node( + util.IdentityInterface(fields=["subject", "anat"]), name="outputspec" + ) + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "anat") # Return the workflow return wf -def create_roi_mask_dataflow(masks, wf_name='datasource_roi_mask'): +def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): import os mask_dict = {} for mask_file in masks: + mask_file = mask_file.rstrip("\r\n") - mask_file = mask_file.rstrip('\r\n') - - if mask_file.strip() == '' or mask_file.startswith('#'): + if mask_file.strip() == "" or mask_file.startswith("#"): continue name, desc = lookup_identifier(mask_file) - if name == 'template': + if name == "template": base_file = os.path.basename(mask_file) try: - valid_extensions = ['.nii', '.nii.gz'] + valid_extensions = [".nii", ".nii.gz"] base_name = [ - base_file[:-len(ext)] + base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ][0] - for key in ['res', 'space']: + for key in ["res", "space"]: base_name = bids_remove_entity(base_name, key) except IndexError: # pylint: disable=raise-missing-from - raise ValueError('Error in spatial_map_dataflow: File ' - f'extension of {base_file} not ".nii" or ' - '.nii.gz') + raise ValueError( + "Error in spatial_map_dataflow: File " + f'extension of {base_file} not ".nii" or ' + ".nii.gz" + ) except Exception as e: raise e @@ -1121,54 +1286,56 @@ def create_roi_mask_dataflow(masks, wf_name='datasource_roi_mask'): base_name = format_identifier(name, desc) 
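+        # Editorial sketch (not part of the original patch): base_name is the
+        # lookup key for mask_dict, so two template files that differ only in
+        # the entities stripped above can collide. With hypothetical inputs
+        #
+        #     /templates/tpl-MNI_res-01_atlas-HO_dseg.nii.gz
+        #     /templates/tpl-MNI_res-02_atlas-HO_dseg.nii.gz
+        #
+        # both reduce to the same key once the "res" entity is removed, and
+        # the ValueError below reports the duplicate instead of silently
+        # overwriting the first entry.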
if base_name in mask_dict: - raise ValueError('Duplicate templates/atlases not allowed: ' - f'{mask_file} {mask_dict[base_name]}') + raise ValueError( + "Duplicate templates/atlases not allowed: " + f"{mask_file} {mask_dict[base_name]}" + ) mask_dict[base_name] = mask_file wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['mask', - 'mask_file', - 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=["mask", "mask_file", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputspec", + ) - mask_keys, mask_values = \ - zip(*mask_dict.items()) + mask_keys, mask_values = zip(*mask_dict.items()) inputnode.synchronize = True inputnode.iterables = [ - ('mask', mask_keys), - ('mask_file', mask_values), + ("mask", mask_keys), + ("mask_file", mask_values), ] - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) - wf.connect(inputnode, 'mask_file', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'mask' + wf.connect(inputnode, "mask_file", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "mask" - outputnode = pe.Node(util.IdentityInterface(fields=['out_file', - 'out_name']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["out_file", "out_name"]), name="outputspec" + ) - wf.connect(check_s3_node, 'local_path', outputnode, 'out_file') - wf.connect(inputnode, 'mask', outputnode, 'out_name') + wf.connect(check_s3_node, "local_path", outputnode, "out_file") + wf.connect(inputnode, "mask", outputnode, "out_name") return wf -def create_spatial_map_dataflow(spatial_maps, wf_name='datasource_maps'): +def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): import os wf = pe.Workflow(name=wf_name) @@ -1176,126 +1343,123 @@ def create_spatial_map_dataflow(spatial_maps, wf_name='datasource_maps'): spatial_map_dict = {} for spatial_map_file in spatial_maps: - - spatial_map_file = spatial_map_file.rstrip('\r\n') + spatial_map_file = spatial_map_file.rstrip("\r\n") base_file = os.path.basename(spatial_map_file) try: - valid_extensions = ['.nii', '.nii.gz'] + valid_extensions = [".nii", ".nii.gz"] base_name = [ - base_file[:-len(ext)] + base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ][0] if base_name in spatial_map_dict: raise ValueError( - 'Files with same name not allowed: %s %s' % ( - spatial_map_file, - spatial_map_dict[base_name] - ) + "Files with same name not allowed: %s %s" + % (spatial_map_file, spatial_map_dict[base_name]) ) spatial_map_dict[base_name] = spatial_map_file - except IndexError as e: - raise Exception('Error in spatial_map_dataflow: ' - 'File extension not in .nii and .nii.gz') + except IndexError: + raise Exception( + "Error in spatial_map_dataflow: " + "File extension not in .nii and .nii.gz" + ) - inputnode = 
pe.Node(util.IdentityInterface(fields=['spatial_map', - 'spatial_map_file', - 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=["spatial_map", "spatial_map_file", "creds_path", "dl_dir"], + mandatory_inputs=True, + ), + name="inputspec", + ) - spatial_map_keys, spatial_map_values = \ - zip(*spatial_map_dict.items()) + spatial_map_keys, spatial_map_values = zip(*spatial_map_dict.items()) inputnode.synchronize = True inputnode.iterables = [ - ('spatial_map', spatial_map_keys), - ('spatial_map_file', spatial_map_values), + ("spatial_map", spatial_map_keys), + ("spatial_map_file", spatial_map_values), ] - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) - wf.connect(inputnode, 'spatial_map_file', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'mask' + wf.connect(inputnode, "spatial_map_file", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "mask" - select_spatial_map = pe.Node(util.IdentityInterface(fields=['out_file', - 'out_name'], - mandatory_inputs=True), - name='select_spatial_map') + select_spatial_map = pe.Node( + util.IdentityInterface(fields=["out_file", "out_name"], mandatory_inputs=True), + name="select_spatial_map", + ) - wf.connect(check_s3_node, 'local_path', select_spatial_map, 'out_file') - wf.connect(inputnode, 'spatial_map', select_spatial_map, 'out_name') + wf.connect(check_s3_node, "local_path", select_spatial_map, "out_file") + wf.connect(inputnode, "spatial_map", select_spatial_map, "out_name") return wf -def create_grp_analysis_dataflow(wf_name='gp_dataflow'): - from CPAC.pipeline import nipype_pipeline_engine as pe +def create_grp_analysis_dataflow(wf_name="gp_dataflow"): import nipype.interfaces.utility as util + + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import select_model_files wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['ftest', - 'grp_model', - 'model_name'], - mandatory_inputs=True), - name='inputspec') - - selectmodel = pe.Node(function.Function(input_names=['model', - 'ftest', - 'model_name'], - output_names=['fts_file', - 'con_file', - 'grp_file', - 'mat_file'], - function=select_model_files, - as_module=True), - name='selectnode') - - wf.connect(inputnode, 'ftest', - selectmodel, 'ftest') - wf.connect(inputnode, 'grp_model', - selectmodel, 'model') - wf.connect(inputnode, 'model_name', selectmodel, 'model_name') - - outputnode = pe.Node(util.IdentityInterface(fields=['fts', - 'grp', - 'mat', - 'con'], - mandatory_inputs=True), - name='outputspec') - - wf.connect(selectmodel, 'mat_file', - outputnode, 'mat') - wf.connect(selectmodel, 'grp_file', - outputnode, 'grp') - wf.connect(selectmodel, 'fts_file', - outputnode, 'fts') - wf.connect(selectmodel, 'con_file', - outputnode, 'con') + inputnode = pe.Node( + util.IdentityInterface( + fields=["ftest", 
"grp_model", "model_name"], mandatory_inputs=True + ), + name="inputspec", + ) + + selectmodel = pe.Node( + function.Function( + input_names=["model", "ftest", "model_name"], + output_names=["fts_file", "con_file", "grp_file", "mat_file"], + function=select_model_files, + as_module=True, + ), + name="selectnode", + ) + + wf.connect(inputnode, "ftest", selectmodel, "ftest") + wf.connect(inputnode, "grp_model", selectmodel, "model") + wf.connect(inputnode, "model_name", selectmodel, "model_name") + + outputnode = pe.Node( + util.IdentityInterface( + fields=["fts", "grp", "mat", "con"], mandatory_inputs=True + ), + name="outputspec", + ) + + wf.connect(selectmodel, "mat_file", outputnode, "mat") + wf.connect(selectmodel, "grp_file", outputnode, "grp") + wf.connect(selectmodel, "fts_file", outputnode, "fts") + wf.connect(selectmodel, "con_file", outputnode, "con") return wf def resample_func_roi(in_func, in_roi, realignment, identity_matrix): import os + import nibabel as nb + from CPAC.utils.monitoring.custom_logging import log_subprocess # load func and ROI dimension @@ -1306,32 +1470,46 @@ def resample_func_roi(in_func, in_roi, realignment, identity_matrix): # check if func size = ROI size, return func and ROI; else resample using flirt if roi_shape != func_shape: - # resample func to ROI: in_file = func, reference = ROI - if 'func_to_ROI' in realignment: + if "func_to_ROI" in realignment: in_file = in_func reference = in_roi - out_file = os.path.join(os.getcwd(), in_file[in_file.rindex( - '/') + 1:in_file.rindex('.nii')] + '_resampled.nii.gz') + out_file = os.path.join( + os.getcwd(), + in_file[in_file.rindex("/") + 1 : in_file.rindex(".nii")] + + "_resampled.nii.gz", + ) out_func = out_file out_roi = in_roi - interp = 'trilinear' + interp = "trilinear" # resample ROI to func: in_file = ROI, reference = func - elif 'ROI_to_func' in realignment: + elif "ROI_to_func" in realignment: in_file = in_roi reference = in_func - out_file = os.path.join(os.getcwd(), in_file[in_file.rindex( - '/') + 1:in_file.rindex('.nii')] + '_resampled.nii.gz') + out_file = os.path.join( + os.getcwd(), + in_file[in_file.rindex("/") + 1 : in_file.rindex(".nii")] + + "_resampled.nii.gz", + ) out_func = in_func out_roi = out_file - interp = 'nearestneighbour' - - cmd = ['flirt', '-in', in_file, - '-ref', reference, - '-out', out_file, - '-interp', interp, - '-applyxfm', '-init', identity_matrix] + interp = "nearestneighbour" + + cmd = [ + "flirt", + "-in", + in_file, + "-ref", + reference, + "-out", + out_file, + "-interp", + interp, + "-applyxfm", + "-init", + identity_matrix, + ] log_subprocess(cmd) else: diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index f2a0a6aafb..084f299c0a 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -1,5 +1,7 @@ import os + from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution @@ -9,206 +11,233 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=['file']), name='file_node_{0}'.format( - file_node_num) + util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" ) input_node.inputs.file = path - return input_node, 'file' + return input_node, "file" -def configuration_strategy_mock(method='FSL'): - fsldir = os.environ.get('FSLDIR') +def configuration_strategy_mock(method="FSL"): + fsldir = os.environ.get("FSLDIR") # mock the config 
dictionary - c = Configuration({ - "pipeline_setup": { - "output_directory": { - "path": "/output/output/pipeline_analysis_nuisance/" - "sub-M10978008_ses-NFB3" - }, - "working_directory": { - "path": "/scratch/pipeline_tests" - }, - "system_config": { - "num_ants_threads": 4 - } - }, - "registration_workflows": { - "functional_registration": { - "EPI_registration": { - "FSL-FNIRT": { - "identity_matrix": f"{fsldir}/etc/flirtsch/" - "ident.mat", - "interpolation": "sinc" - } + c = Configuration( + { + "pipeline_setup": { + "output_directory": { + "path": "/output/output/pipeline_analysis_nuisance/" + "sub-M10978008_ses-NFB3" }, - "func_registration_to_template": { - "ANTs_pipelines": { - "interpolation": "LanczosWindowedSinc" + "working_directory": {"path": "/scratch/pipeline_tests"}, + "system_config": {"num_ants_threads": 4}, + }, + "registration_workflows": { + "functional_registration": { + "EPI_registration": { + "FSL-FNIRT": { + "identity_matrix": f"{fsldir}/etc/flirtsch/" "ident.mat", + "interpolation": "sinc", + } }, - "output_resolution": { - "func_preproc_outputs": "3mm", - "func_derivative_outputs": "3mm" + "func_registration_to_template": { + "ANTs_pipelines": {"interpolation": "LanczosWindowedSinc"}, + "output_resolution": { + "func_preproc_outputs": "3mm", + "func_derivative_outputs": "3mm", + }, + "target_template": { + "T1_template": { + "T1w_template_for_resample": f"{fsldir}/" + "data/standard/" + "MNI152_T1_1mm_brain." + "nii.gz", + "T1w_brain_template_funcreg": f"{fsldir}/" + "data/standard/" + "MNI152_T1_" + "${resolution_for_" + "func_preproc}_" + "brain.nii.gz", + "T1w_template_funcreg": f"{fsldir}/data/" + "standard/MNI152_T1_" + "${resolution_for_func_" + "preproc}.nii.gz", + } + }, }, - "target_template": { - "T1_template": { - "T1w_template_for_resample": f"{fsldir}/" - "data/standard/" - "MNI152_T1_1mm_brain." 
- "nii.gz", - "T1w_brain_template_funcreg": f"{fsldir}/" - "data/standard/" - "MNI152_T1_" - "${resolution_for_" - "func_preproc}_" - "brain.nii.gz", - "T1w_template_funcreg": f"{fsldir}/data/" - "standard/MNI152_T1_" - "${resolution_for_func_" - "preproc}.nii.gz" - } - } } - } - }, - "post_processing": { - "spatial_smoothing": { - "fwhm": [2, 3, 4] - } + }, + "post_processing": {"spatial_smoothing": {"fwhm": [2, 3, 4]}}, } - }) + ) - if method == 'ANTS': - c.update('regOption', 'ANTS') + if method == "ANTS": + c.update("regOption", "ANTS") else: - c.update('regOption', 'FSL') + c.update("regOption", "FSL") # mock the strategy strat = Strategy() resource_dict = { - "functional_nuisance_residuals": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "motion_correct/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg.nii.gz"), - "mean_functional": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "mean_functional/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_calc_tstat.nii.gz"), - "functional_brain_mask": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "functional_brain_mask/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_mask.nii.gz"), - "motion_correct": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "motion_correct/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg.nii.gz"), - "anatomical_brain": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_brain/" - "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz"), - "ants_initial_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_initial_xfm/" - "transform0DerivedInitialMovingTranslation.mat"), - "ants_affine_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_affine_xfm/transform2Affine.mat"), - "ants_rigid_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_rigid_xfm/transform1Rigid.mat"), - "anatomical_to_mni_linear_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_mni_linear_xfm/" - "sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat"), - "functional_to_anat_linear_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "functional_to_anat_linear_xfm/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_calc_tstat_flirt.mat"), - 'ants_symm_warp_field': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_symmetric_mni_nonlinear_xfm/" - "transform3Warp.nii.gz"), - 'ants_symm_affine_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_affine_xfm/transform2Affine.mat"), - 'ants_symm_rigid_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_rigid_xfm/transform1Rigid.mat"), - 'ants_symm_initial_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_initial_xfm/" - "transform0DerivedInitialMovingTranslation.mat"), - "dr_tempreg_maps_files": [os.path.join( - '/scratch', - 'resting_preproc_sub-M10978008_ses-NFB3_cpac105', - 'temporal_dual_regression_0/_scan_test/' - '_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/' - '_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_' - '..cpac_templates..PNAS_Smith09_rsn10.nii.gz/' - 'split_raw_volumes/temp_reg_map_000{0}.nii.gz'.format(n) - ) for n in range(10)] + 
"functional_nuisance_residuals": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "motion_correct/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg.nii.gz", + ), + "mean_functional": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "mean_functional/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_calc_tstat.nii.gz", + ), + "functional_brain_mask": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "functional_brain_mask/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_mask.nii.gz", + ), + "motion_correct": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "motion_correct/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg.nii.gz", + ), + "anatomical_brain": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_brain/" "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + ), + "ants_initial_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_initial_xfm/" "transform0DerivedInitialMovingTranslation.mat", + ), + "ants_affine_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_affine_xfm/transform2Affine.mat", + ), + "ants_rigid_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_rigid_xfm/transform1Rigid.mat", + ), + "anatomical_to_mni_linear_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_mni_linear_xfm/" + "sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat", + ), + "functional_to_anat_linear_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "functional_to_anat_linear_xfm/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_calc_tstat_flirt.mat", + ), + "ants_symm_warp_field": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_symmetric_mni_nonlinear_xfm/" "transform3Warp.nii.gz", + ), + "ants_symm_affine_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_affine_xfm/transform2Affine.mat", + ), + "ants_symm_rigid_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_rigid_xfm/transform1Rigid.mat", + ), + "ants_symm_initial_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_initial_xfm/" + "transform0DerivedInitialMovingTranslation.mat", + ), + "dr_tempreg_maps_files": [ + os.path.join( + "/scratch", + "resting_preproc_sub-M10978008_ses-NFB3_cpac105", + "temporal_dual_regression_0/_scan_test/" + "_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/" + "_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_" + "..cpac_templates..PNAS_Smith09_rsn10.nii.gz/" + f"split_raw_volumes/temp_reg_map_000{n}.nii.gz", + ) + for n in range(10) + ], } - if method == 'ANTS': + if method == "ANTS": resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz") + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz", + ) else: resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( - c['pipeline_setup', 'output_directory', 'path'], + c["pipeline_setup", "output_directory", "path"], "anatomical_to_mni_nonlinear_xfm/" - "sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz") + 
"sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz", + ) file_node_num = 0 for resource, filepath in resource_dict.items(): - strat.update_resource_pool({ - resource: file_node(filepath, file_node_num) - }) - strat.append_name(resource+'_0') + strat.update_resource_pool({resource: file_node(filepath, file_node_num)}) + strat.append_name(resource + "_0") file_node_num += 1 templates_for_resampling = [ - (c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'output_resolution', - 'func_preproc_outputs'], - c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'target_template', 'T1_template', - 'T1w_brain_template_funcreg'], - 'template_brain_for_func_preproc', - 'resolution_for_func_preproc'), - (c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'output_resolution', - 'func_preproc_outputs'], - c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'target_template', 'T1_template', - 'T1w_brain_template_funcreg'], - 'template_skull_for_func_preproc', - 'resolution_for_func_preproc') + ( + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "output_resolution", + "func_preproc_outputs", + ], + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "target_template", + "T1_template", + "T1w_brain_template_funcreg", + ], + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "output_resolution", + "func_preproc_outputs", + ], + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "target_template", + "T1_template", + "T1w_brain_template_funcreg", + ], + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function(input_names=[ - 'resolution', 'template', 'template_name', 'tag' - ], - output_names=[ - 'resampled_template' - ], - function=resolve_resolution, - as_module=True), - name='resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat.update_resource_pool({ - template_name: (resampled_template, 'resampled_template')}) - strat.append_name('resampled_template_0') + strat.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) + strat.append_name("resampled_template_0") return c, strat diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index acdbbfbb02..71b970dac0 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -18,44 +18,51 @@ import argparse import datetime import os +import shutil import subprocess import sys import time -import shutil from warnings import simplefilter -from nipype import logging + import yaml -from CPAC import license_notice, __version__ +from nipype import logging + +from CPAC import __version__, license_notice from CPAC.pipeline import AVAILABLE_PIPELINE_CONFIGS from 
CPAC.pipeline.random_state import set_up_random_state from CPAC.pipeline.schema import str_to_bool1_1 -from CPAC.utils.bids_utils import cl_strip_brackets, \ - create_cpac_data_config, \ - load_cpac_data_config, \ - load_yaml_config, \ - sub_list_filter_by_labels +from CPAC.utils.bids_utils import ( + cl_strip_brackets, + create_cpac_data_config, + load_cpac_data_config, + load_yaml_config, + sub_list_filter_by_labels, +) from CPAC.utils.configuration import Configuration, preconfig_yaml, set_subject +from CPAC.utils.configuration.yaml_template import ( + create_yaml_from_template, + hash_data_config, + upgrade_pipeline_to_1_8, +) from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.monitoring import failed_to_start, log_nodes_cb -from CPAC.utils.configuration.yaml_template import create_yaml_from_template, \ - hash_data_config, \ - upgrade_pipeline_to_1_8 from CPAC.utils.utils import update_nested_dict -simplefilter(action='ignore', category=FutureWarning) -logger = logging.getLogger('nipype.workflow') + +simplefilter(action="ignore", category=FutureWarning) +logger = logging.getLogger("nipype.workflow") DEFAULT_TMP_DIR = "/tmp" def run(command, env=None): if env is None: env = {} - process = subprocess.Popen(command, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True, env=env) + process = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, env=env + ) while True: line = process.stdout.readline() line = line.decode()[:-1] - if line == '' and process.poll() is not None: + if line == "" and process.poll() is not None: break @@ -71,324 +78,413 @@ def parse_yaml(value): def resolve_aws_credential(source): - if source == "env": from urllib.request import urlopen + aws_creds_address = "169.254.170.2{}".format( os.environ["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] ) aws_creds = urlopen(aws_creds_address).read() - aws_input_creds = "/tmp/aws_input_creds_%d.csv" % int( - round(time.time() * 1000) - ) + aws_input_creds = "/tmp/aws_input_creds_%d.csv" % int(round(time.time() * 1000)) with open(aws_input_creds) as ofd: for key, vname in [ ("AccessKeyId", "AWSAcessKeyId"), - ("SecretAccessKey", "AWSSecretKey") + ("SecretAccessKey", "AWSSecretKey"), ]: - ofd.write("{0}={1}".format(vname, aws_creds[key])) + ofd.write(f"{vname}={aws_creds[key]}") return aws_input_creds if os.path.isfile(source): return source else: - raise IOError( - "Could not find aws credentials {0}" - .format(source) - ) + raise IOError(f"Could not find aws credentials {source}") def run_main(): """Run this function if not importing as a script""" - parser = argparse.ArgumentParser(description='C-PAC Pipeline Runner. ' + - license_notice) - parser.add_argument('bids_dir', - help='The directory with the input dataset ' - 'formatted according to the BIDS standard. ' - 'Use the format s3://bucket/path/to/bidsdir to ' - 'read data directly from an S3 bucket. This may ' - 'require AWS S3 credentials specified via the ' - '--aws_input_creds option.') - parser.add_argument('output_dir', - help='The directory where the output files should be ' - 'stored. If you are running group level analysis ' - 'this folder should be prepopulated with the ' - 'results of the participant level analysis. Use ' - 'the format s3://bucket/path/to/bidsdir to ' - 'write data directly to an S3 bucket. This may ' - 'require AWS S3 credentials specified via the ' - '--aws_output_creds option.') - parser.add_argument('analysis_level', - help='Level of the analysis that will be performed. 
' - 'Multiple participant level analyses can be run ' - 'independently (in parallel) using the same ' - 'output_dir. test_config will run through the ' - 'entire configuration process but will not ' - 'execute the pipeline.', - choices=['participant', 'group', 'test_config', 'cli'], - type=lambda choice: choice.replace('-', '_').lower()) - - parser.add_argument('--pipeline-file', '--pipeline_file', - help='Path for the pipeline configuration file to ' - 'use. Use the format s3://bucket/path/to/' - 'pipeline_file to read data directly from an ' - 'S3 bucket. This may require AWS S3 credentials ' - 'specified via the --aws_input_creds option.', - default=preconfig_yaml('default')) - parser.add_argument('--group-file', '--group_file', - help='Path for the group analysis configuration file ' - 'to use. Use the format s3://bucket/path/to/' - 'pipeline_file to read data directly from an S3 ' - 'bucket. This may require AWS S3 credentials ' - 'specified via the --aws_input_creds option. ' - 'The output directory needs to refer to the ' - 'output of a preprocessing individual pipeline.', - default=None) - parser.add_argument('--data-config-file', '--data_config_file', - help='Yaml file containing the location of the data ' - 'that is to be processed. This file is not ' - 'necessary if the data in bids_dir is organized ' - 'according to the BIDS format. This enables ' - 'support for legacy data organization and cloud ' - 'based storage. A bids_dir must still be ' - 'specified when using this option, but its ' - 'value will be ignored. Use the format s3://' - 'bucket/path/to/data_config_file to read data ' - 'directly from an S3 bucket. This may require ' - 'AWS S3 credentials specified via the ' - '--aws_input_creds option.', - default=None) - - parser.add_argument('--preconfig', - help='Name of the preconfigured pipeline to run. ' - 'Available preconfigured pipelines: ' + - str(AVAILABLE_PIPELINE_CONFIGS) + '. See ' - f'{DOCS_URL_PREFIX}/user/pipelines/preconfig ' - 'for more information about the preconfigured ' - 'pipelines.', - default=None) - if [_ for _ in ['--pipeline-override', - '--pipeline_override']if _ in sys.argv]: # secret option - parser.add_argument('--pipeline-override', '--pipeline_override', - type=parse_yaml, action='append', - help='Override specific options from the ' - 'pipeline configuration. E.g.: ' - '"{\'pipeline_setup\': {\'system_config\': ' - '{\'maximum_memory_per_participant\': 1}}}"') - - parser.add_argument('--aws-input-creds', '--aws_input_creds', - help='Credentials for reading from S3. If not ' - 'provided and s3 paths are specified in the ' - 'data config we will try to access the bucket ' - 'anonymously use the string "env" to indicate ' - 'that input credentials should read from the ' - 'environment. (E.g. when using AWS iam roles).', - default=None) - parser.add_argument('--aws-output-creds', '--aws_output_creds', - help='Credentials for writing to S3. If not provided ' - 'and s3 paths are specified in the output ' - 'directory we will try to access the bucket ' - 'anonymously use the string "env" to indicate ' - 'that output credentials should read from the ' - 'environment. (E.g. when using AWS iam roles).', - default=None) + parser = argparse.ArgumentParser( + description="C-PAC Pipeline Runner. " + license_notice + ) + parser.add_argument( + "bids_dir", + help="The directory with the input dataset " + "formatted according to the BIDS standard. " + "Use the format s3://bucket/path/to/bidsdir to " + "read data directly from an S3 bucket. 
This may " + "require AWS S3 credentials specified via the " + "--aws_input_creds option.", + ) + parser.add_argument( + "output_dir", + help="The directory where the output files should be " + "stored. If you are running group level analysis " + "this folder should be prepopulated with the " + "results of the participant level analysis. Use " + "the format s3://bucket/path/to/bidsdir to " + "write data directly to an S3 bucket. This may " + "require AWS S3 credentials specified via the " + "--aws_output_creds option.", + ) + parser.add_argument( + "analysis_level", + help="Level of the analysis that will be performed. " + "Multiple participant level analyses can be run " + "independently (in parallel) using the same " + "output_dir. test_config will run through the " + "entire configuration process but will not " + "execute the pipeline.", + choices=["participant", "group", "test_config", "cli"], + type=lambda choice: choice.replace("-", "_").lower(), + ) + + parser.add_argument( + "--pipeline-file", + "--pipeline_file", + help="Path for the pipeline configuration file to " + "use. Use the format s3://bucket/path/to/" + "pipeline_file to read data directly from an " + "S3 bucket. This may require AWS S3 credentials " + "specified via the --aws_input_creds option.", + default=preconfig_yaml("default"), + ) + parser.add_argument( + "--group-file", + "--group_file", + help="Path for the group analysis configuration file " + "to use. Use the format s3://bucket/path/to/" + "pipeline_file to read data directly from an S3 " + "bucket. This may require AWS S3 credentials " + "specified via the --aws_input_creds option. " + "The output directory needs to refer to the " + "output of a preprocessing individual pipeline.", + default=None, + ) + parser.add_argument( + "--data-config-file", + "--data_config_file", + help="Yaml file containing the location of the data " + "that is to be processed. This file is not " + "necessary if the data in bids_dir is organized " + "according to the BIDS format. This enables " + "support for legacy data organization and cloud " + "based storage. A bids_dir must still be " + "specified when using this option, but its " + "value will be ignored. Use the format s3://" + "bucket/path/to/data_config_file to read data " + "directly from an S3 bucket. This may require " + "AWS S3 credentials specified via the " + "--aws_input_creds option.", + default=None, + ) + + parser.add_argument( + "--preconfig", + help="Name of the preconfigured pipeline to run. " + "Available preconfigured pipelines: " + + str(AVAILABLE_PIPELINE_CONFIGS) + + ". See " + f"{DOCS_URL_PREFIX}/user/pipelines/preconfig " + "for more information about the preconfigured " + "pipelines.", + default=None, + ) + if [ + _ for _ in ["--pipeline-override", "--pipeline_override"] if _ in sys.argv + ]: # secret option + parser.add_argument( + "--pipeline-override", + "--pipeline_override", + type=parse_yaml, + action="append", + help="Override specific options from the " + "pipeline configuration. E.g.: " + "\"{'pipeline_setup': {'system_config': " + "{'maximum_memory_per_participant': 1}}}\"", + ) + + parser.add_argument( + "--aws-input-creds", + "--aws_input_creds", + help="Credentials for reading from S3. If not " + "provided and s3 paths are specified in the " + "data config we will try to access the bucket " + 'anonymously use the string "env" to indicate ' + "that input credentials should read from the " + "environment. (E.g. 
when using AWS iam roles).", + default=None, + ) + parser.add_argument( + "--aws-output-creds", + "--aws_output_creds", + help="Credentials for writing to S3. If not provided " + "and s3 paths are specified in the output " + "directory we will try to access the bucket " + 'anonymously use the string "env" to indicate ' + "that output credentials should read from the " + "environment. (E.g. when using AWS iam roles).", + default=None, + ) # TODO: restore for <--n_cpus> once we remove # from config file # - parser.add_argument('--n-cpus', '--n_cpus', type=int, default=0, - help='Number of execution resources per participant ' - 'available for the pipeline. This flag takes ' - 'precidence over max_cores_per_participant in ' - 'the pipeline configuration file.') - parser.add_argument('--mem-mb', '--mem_mb', type=float, - help='Amount of RAM available per participant in ' - 'megabytes. Included for compatibility with ' - 'BIDS-Apps standard, but mem_gb is preferred. ' - 'This flag takes precedence over ' - 'maximum_memory_per_participant in the pipeline ' - 'configuration file.') - parser.add_argument('--mem-gb', '--mem_gb', type=float, - help='Amount of RAM available per participant in ' - 'gigabytes. If this is specified along with ' - 'mem_mb, this flag will take precedence. This ' - 'flag also takes precedence over ' - 'maximum_memory_per_participant in the pipeline ' - 'configuration file.') - parser.add_argument('--runtime-usage', '--runtime_usage', type=str, - help='Path to a callback.log from a prior run of the ' - 'same pipeline configuration (including any ' - 'resource-management parameters that will be ' - "applied in this run, like 'n_cpus' and " - "'num_ants_threads'). This log will be used to " - 'override per-node memory estimates with ' - 'observed values plus a buffer.') - parser.add_argument('--runtime-buffer', '--runtime_buffer', type=float, - help='Buffer to add to per-node memory estimates if ' - '--runtime_usage is specified. This number is a ' - 'percentage of the observed memory usage.') - parser.add_argument('--num-ants-threads', '--num_ants_threads', type=int, - default=0, - help='The number of cores to allocate to ANTS-' - 'based anatomical registration per ' - 'participant. Multiple cores can greatly ' - 'speed up this preprocessing step. This ' - 'number cannot be greater than the number of ' - 'cores per participant.') - parser.add_argument('--random-seed', '--random_seed', type=str, - help='Random seed used to fix the state of execution. ' - 'If unset, each process uses its own default. If ' - 'set, a `random.log` file will be generated ' - 'logging the random state used by each process. ' - 'If set to a positive integer (up to 2147483647' - '), that integer will be used to seed each ' - 'process. If set to \'random\', a random seed ' - 'will be generated and recorded for each ' - 'process.') - parser.add_argument('--save-working-dir', '--save_working_dir', nargs='?', - help='Save the contents of the working directory.', - default=False) - parser.add_argument('--fail-fast', '--fail_fast', type=str.title, - help='Stop worklow execution on first crash?') - parser.add_argument('--participant-label', '--participant_label', - help='The label of the participant that should be ' - 'analyzed. The label corresponds to ' - 'sub- from the BIDS spec ' - '(so it does not include "sub-"). If this ' - 'parameter is not provided all participants ' - 'should be analyzed. 
Multiple participants ' - 'can be specified with a space separated ' - 'list.', - nargs="+") - parser.add_argument('--participant-ndx', '--participant_ndx', - help='The index of the participant that should be ' - 'analyzed. This corresponds to the index of ' - 'the participant in the data config file. ' - 'This was added to make it easier to ' - 'accommodate SGE array jobs. Only a single ' - 'participant will be analyzed. Can be used ' - 'with participant label, in which case it is ' - 'the index into the list that follows the ' - 'participant_label flag. Use the value "-1" ' - 'to indicate that the participant index ' - 'should be read from the ' - 'AWS_BATCH_JOB_ARRAY_INDEX environment ' - 'variable.', - default=None, type=int) - - parser.add_argument('--T1w-label', '--T1w_label', - help='C-PAC only runs one T1w per participant-' - 'session at a time, at this time. Use this ' - 'flag to specify any BIDS entity (e.g., "acq-' - 'VNavNorm") or sequence of BIDS entities (' - 'e.g., "acq-VNavNorm_run-1") to specify ' - 'which of multiple T1w files to use. Specify ' - '"--T1w_label T1w" to choose the T1w file ' - 'with the fewest BIDS entities (i.e., the ' - 'final option of [*_acq-VNavNorm_T1w.nii.gz, ' - '*_acq-HCP_T1w.nii.gz, *_T1w.nii.gz"]). ' - 'C-PAC will choose the first T1w it finds if ' - 'the user does not provide this flag, or ' - 'if multiple T1w files match the --T1w_label ' - 'provided.\nIf multiple T2w files are present ' - 'and a comparable filter is possible, T2w ' - 'files will be filtered as well. If no T2w files ' - 'match this --T1w_label, T2w files will be ' - 'processed as if no --T1w_label were provided.') - parser.add_argument('--bold-label', '--bold_label', - help='To include a specified subset of available ' - 'BOLD files, use this flag to specify any ' - 'BIDS entity (e.g., "task-rest") or sequence ' - 'of BIDS entities (e.g. "task-rest_run-1"). ' - 'To specify the bold file with the fewest ' - 'BIDS entities in the file name, specify ' - '"--bold_label bold". Multiple `--bold_' - 'label`s can be specified with a space-' - 'separated list. If multiple `--bold_label`s ' - 'are provided (e.g., "--bold_label task-rest_' - 'run-1 task-rest_run-2", each scan that ' - 'includes all BIDS entities specified in any ' - 'of the provided `--bold_label`s will be ' - 'analyzed. If this parameter is not provided ' - 'all BOLD scans should be analyzed.', - nargs="+") - - parser.add_argument('-v', '--version', action='version', - version=f'C-PAC BIDS-App version {__version__}') - parser.add_argument('--bids-validator-config', '--bids_validator_config', - help='JSON file specifying configuration of ' - 'bids-validator: See https://github.com/bids-' - 'standard/bids-validator for more info.') - parser.add_argument('--skip-bids-validator', '--skip_bids_validator', - help='Skips bids validation.', - action='store_true') - - parser.add_argument('--anat-only', '--anat_only', - help='run only the anatomical preprocessing', - action='store_true') - - parser.add_argument('--user_defined', type=str, - help='Arbitrary user defined string that will be ' - 'included in every output sidecar file.') - - parser.add_argument('--tracking-opt-out', '--tracking_opt-out', - action='store_true', - help='Disable usage tracking. Only the number of ' - 'participants on the analysis is tracked.', - default=False) - - parser.add_argument('--monitoring', - help='Enable monitoring server on port 8080. 
You '
-                             'need to bind the port using the Docker '
-                             'flag "-p".',
-                        action='store_true')
-
-    parser.add_argument('--freesurfer_dir', '--freesurfer-dir',
-                        help='Specify path to pre-computed FreeSurfer outputs '
-                             'to pull into C-PAC run',
-                        default=False)
+    parser.add_argument(
+        "--n-cpus",
+        "--n_cpus",
+        type=int,
+        default=0,
+        help="Number of execution resources per participant "
+        "available for the pipeline. This flag takes "
+        "precedence over max_cores_per_participant in "
+        "the pipeline configuration file.",
+    )
+    parser.add_argument(
+        "--mem-mb",
+        "--mem_mb",
+        type=float,
+        help="Amount of RAM available per participant in "
+        "megabytes. Included for compatibility with "
+        "BIDS-Apps standard, but mem_gb is preferred. "
+        "This flag takes precedence over "
+        "maximum_memory_per_participant in the pipeline "
+        "configuration file.",
+    )
+    parser.add_argument(
+        "--mem-gb",
+        "--mem_gb",
+        type=float,
+        help="Amount of RAM available per participant in "
+        "gigabytes. If this is specified along with "
+        "mem_mb, this flag will take precedence. This "
+        "flag also takes precedence over "
+        "maximum_memory_per_participant in the pipeline "
+        "configuration file.",
+    )
+    parser.add_argument(
+        "--runtime-usage",
+        "--runtime_usage",
+        type=str,
+        help="Path to a callback.log from a prior run of the "
+        "same pipeline configuration (including any "
+        "resource-management parameters that will be "
+        "applied in this run, like 'n_cpus' and "
+        "'num_ants_threads'). This log will be used to "
+        "override per-node memory estimates with "
+        "observed values plus a buffer.",
+    )
+    parser.add_argument(
+        "--runtime-buffer",
+        "--runtime_buffer",
+        type=float,
+        help="Buffer to add to per-node memory estimates if "
+        "--runtime_usage is specified. This number is a "
+        "percentage of the observed memory usage.",
+    )
+    parser.add_argument(
+        "--num-ants-threads",
+        "--num_ants_threads",
+        type=int,
+        default=0,
+        help="The number of cores to allocate to ANTS-"
+        "based anatomical registration per "
+        "participant. Multiple cores can greatly "
+        "speed up this preprocessing step. This "
+        "number cannot be greater than the number of "
+        "cores per participant.",
+    )
+    parser.add_argument(
+        "--random-seed",
+        "--random_seed",
+        type=str,
+        help="Random seed used to fix the state of execution. "
+        "If unset, each process uses its own default. If "
+        "set, a `random.log` file will be generated "
+        "logging the random state used by each process. "
+        "If set to a positive integer (up to 2147483647"
+        "), that integer will be used to seed each "
+        "process. If set to 'random', a random seed "
+        "will be generated and recorded for each "
+        "process.",
+    )
+    parser.add_argument(
+        "--save-working-dir",
+        "--save_working_dir",
+        nargs="?",
+        help="Save the contents of the working directory.",
+        default=False,
+    )
+    parser.add_argument(
+        "--fail-fast",
+        "--fail_fast",
+        type=str.title,
+        help="Stop workflow execution on first crash?",
+    )
+    parser.add_argument(
+        "--participant-label",
+        "--participant_label",
+        help="The label of the participant that should be "
+        "analyzed. The label corresponds to "
+        "sub-<participant_label> from the BIDS spec "
+        '(so it does not include "sub-"). If this '
+        "parameter is not provided all participants "
+        "should be analyzed. Multiple participants "
+        "can be specified with a space separated "
+        "list.",
+        nargs="+",
+    )
+    parser.add_argument(
+        "--participant-ndx",
+        "--participant_ndx",
+        help="The index of the participant that should be "
+        "analyzed. 
This corresponds to the index of " + "the participant in the data config file. " + "This was added to make it easier to " + "accommodate SGE array jobs. Only a single " + "participant will be analyzed. Can be used " + "with participant label, in which case it is " + "the index into the list that follows the " + 'participant_label flag. Use the value "-1" ' + "to indicate that the participant index " + "should be read from the " + "AWS_BATCH_JOB_ARRAY_INDEX environment " + "variable.", + default=None, + type=int, + ) + + parser.add_argument( + "--T1w-label", + "--T1w_label", + help="C-PAC only runs one T1w per participant-" + "session at a time, at this time. Use this " + 'flag to specify any BIDS entity (e.g., "acq-' + 'VNavNorm") or sequence of BIDS entities (' + 'e.g., "acq-VNavNorm_run-1") to specify ' + "which of multiple T1w files to use. Specify " + '"--T1w_label T1w" to choose the T1w file ' + "with the fewest BIDS entities (i.e., the " + "final option of [*_acq-VNavNorm_T1w.nii.gz, " + '*_acq-HCP_T1w.nii.gz, *_T1w.nii.gz"]). ' + "C-PAC will choose the first T1w it finds if " + "the user does not provide this flag, or " + "if multiple T1w files match the --T1w_label " + "provided.\nIf multiple T2w files are present " + "and a comparable filter is possible, T2w " + "files will be filtered as well. If no T2w files " + "match this --T1w_label, T2w files will be " + "processed as if no --T1w_label were provided.", + ) + parser.add_argument( + "--bold-label", + "--bold_label", + help="To include a specified subset of available " + "BOLD files, use this flag to specify any " + 'BIDS entity (e.g., "task-rest") or sequence ' + 'of BIDS entities (e.g. "task-rest_run-1"). ' + "To specify the bold file with the fewest " + "BIDS entities in the file name, specify " + '"--bold_label bold". Multiple `--bold_' + "label`s can be specified with a space-" + "separated list. If multiple `--bold_label`s " + 'are provided (e.g., "--bold_label task-rest_' + 'run-1 task-rest_run-2", each scan that ' + "includes all BIDS entities specified in any " + "of the provided `--bold_label`s will be " + "analyzed. If this parameter is not provided " + "all BOLD scans should be analyzed.", + nargs="+", + ) + + parser.add_argument( + "-v", + "--version", + action="version", + version=f"C-PAC BIDS-App version {__version__}", + ) + parser.add_argument( + "--bids-validator-config", + "--bids_validator_config", + help="JSON file specifying configuration of " + "bids-validator: See https://github.com/bids-" + "standard/bids-validator for more info.", + ) + parser.add_argument( + "--skip-bids-validator", + "--skip_bids_validator", + help="Skips bids validation.", + action="store_true", + ) + + parser.add_argument( + "--anat-only", + "--anat_only", + help="run only the anatomical preprocessing", + action="store_true", + ) + + parser.add_argument( + "--user_defined", + type=str, + help="Arbitrary user defined string that will be " + "included in every output sidecar file.", + ) + + parser.add_argument( + "--tracking-opt-out", + "--tracking_opt-out", + action="store_true", + help="Disable usage tracking. Only the number of " + "participants on the analysis is tracked.", + default=False, + ) + + parser.add_argument( + "--monitoring", + help="Enable monitoring server on port 8080. 
You " + "need to bind the port using the Docker " + 'flag "-p".', + action="store_true", + ) + + parser.add_argument( + "--freesurfer_dir", + "--freesurfer-dir", + help="Specify path to pre-computed FreeSurfer outputs " + "to pull into C-PAC run", + default=False, + ) # get the command line arguments args = parser.parse_args( - sys.argv[ - 1:( - sys.argv.index('--') - if '--' in sys.argv - else len(sys.argv) - ) - ] + sys.argv[1 : (sys.argv.index("--") if "--" in sys.argv else len(sys.argv))] ) bids_dir_is_s3 = args.bids_dir.lower().startswith("s3://") - bids_dir = args.bids_dir if bids_dir_is_s3 else os.path.realpath( - args.bids_dir) + bids_dir = args.bids_dir if bids_dir_is_s3 else os.path.realpath(args.bids_dir) output_dir_is_s3 = args.output_dir.lower().startswith("s3://") - output_dir = args.output_dir if output_dir_is_s3 else os.path.realpath( - args.output_dir) + output_dir = ( + args.output_dir if output_dir_is_s3 else os.path.realpath(args.output_dir) + ) exitcode = 0 if args.analysis_level == "cli": from CPAC.__main__ import main - main.main(args=sys.argv[sys.argv.index('--') + 1:]) + + main.main(args=sys.argv[sys.argv.index("--") + 1 :]) sys.exit(0) elif args.analysis_level == "group": if not args.group_file or not os.path.exists(args.group_file): - print() print("No group analysis configuration file was supplied.") print() import pkg_resources as p - args.group_file = \ - p.resource_filename( - "CPAC", - os.path.join( - "resources", - "configs", - "group_config_template.yml" - ) - ) + + args.group_file = p.resource_filename( + "CPAC", + os.path.join("resources", "configs", "group_config_template.yml"), + ) output_group = os.path.join(output_dir, "group_config.yml") @@ -400,8 +496,10 @@ def run_main(): shutil.copyfile(args.group_file, output_group) except (Exception, IOError): print("Could not create group analysis configuration file.") - print("Please refer to the C-PAC documentation for group " - "analysis setup.") + print( + "Please refer to the C-PAC documentation for group " + "analysis setup." + ) print() else: print( @@ -409,38 +507,37 @@ def run_main(): "the file and, after customizing to your analysis, add " "the flag" "\n\n" - " --group_file {0}" + f" --group_file {output_group}" "\n\n" "to your `docker run` command" "\n" - .format(output_group) ) sys.exit(1) else: import CPAC.pipeline.cpac_group_runner as cgr - print("Starting group level analysis of data in {0} using " - "{1}".format(bids_dir, args.group_file)) + + print( + f"Starting group level analysis of data in {bids_dir} using " + f"{args.group_file}" + ) cgr.run(args.group_file) sys.exit(0) elif args.analysis_level in ["test_config", "participant"]: - # check to make sure that the input directory exists if ( - not args.data_config_file and - not bids_dir_is_s3 and - not os.path.exists(bids_dir) + not args.data_config_file + and not bids_dir_is_s3 + and not os.path.exists(bids_dir) ): - print(f"Error! 
Could not find {bids_dir}") sys.exit(1) # check to make sure that the output directory exists if not output_dir_is_s3 and not os.path.exists(output_dir): - try: os.makedirs(output_dir) except Exception: @@ -452,14 +549,11 @@ def run_main(): print() if args.bids_validator_config: print("Running BIDS validator") - run("bids-validator --config {config} {bids_dir}".format( - config=args.bids_validator_config, - bids_dir=bids_dir - )) + run(f"bids-validator --config {args.bids_validator_config} {bids_dir}") elif args.skip_bids_validator: - print('Skipping bids-validator...') + print("Skipping bids-validator...") elif bids_dir_is_s3: - print('Skipping bids-validator for S3 datasets...') + print("Skipping bids-validator for S3 datasets...") else: print("Running BIDS validator") run(f"bids-validator {bids_dir}") @@ -474,285 +568,317 @@ def run_main(): else: c = load_yaml_config(args.pipeline_file, args.aws_input_creds) - if 'pipeline_setup' not in c: - _url = (f'{DOCS_URL_PREFIX}/user/pipelines/' - '1.7-1.8-nesting-mappings') + if "pipeline_setup" not in c: + _url = f"{DOCS_URL_PREFIX}/user/pipelines/" "1.7-1.8-nesting-mappings" - logger.warning('\nC-PAC changed its pipeline configuration ' - 'format in v1.8.0.\nSee %s for details.\n', _url) + logger.warning( + "\nC-PAC changed its pipeline configuration " + "format in v1.8.0.\nSee %s for details.\n", + _url, + ) updated_config = os.path.join( - output_dir, - 'updated_config', - os.path.basename(args.pipeline_file) + output_dir, "updated_config", os.path.basename(args.pipeline_file) ) - os.makedirs( - os.path.join(output_dir, 'updated_config'), exist_ok=True) + os.makedirs(os.path.join(output_dir, "updated_config"), exist_ok=True) - open(updated_config, 'w').write(yaml.dump(c)) + open(updated_config, "w").write(yaml.dump(c)) upgrade_pipeline_to_1_8(updated_config) c = load_yaml_config(updated_config, args.aws_input_creds) overrides = {} - if hasattr(args, 'pipeline_override') and args.pipeline_override: - overrides = { - k: v for d in args.pipeline_override for k, v in d.items()} + if hasattr(args, "pipeline_override") and args.pipeline_override: + overrides = {k: v for d in args.pipeline_override for k, v in d.items()} c = update_nested_dict(c, overrides) if args.anat_only: - c = update_nested_dict(c, {'FROM': 'anat-only'}) + c = update_nested_dict(c, {"FROM": "anat-only"}) if args.user_defined: - c['pipeline_setup']['output_directory']['user_defined'] = args.user_defined + c["pipeline_setup"]["output_directory"]["user_defined"] = args.user_defined c = Configuration(c) # get the aws_input_credentials, if any are specified if args.aws_input_creds: - c['awsCredentialsFile'] = resolve_aws_credential( - args.aws_input_creds) + c["awsCredentialsFile"] = resolve_aws_credential(args.aws_input_creds) if args.aws_output_creds: - c['pipeline_setup']['Amazon-AWS'][ - 'aws_output_bucket_credentials' - ] = resolve_aws_credential( - args.aws_output_creds + c["pipeline_setup"]["Amazon-AWS"]["aws_output_bucket_credentials"] = ( + resolve_aws_credential(args.aws_output_creds) ) - c['pipeline_setup']['output_directory']['path'] = os.path.join( - output_dir, "output") + c["pipeline_setup"]["output_directory"]["path"] = os.path.join( + output_dir, "output" + ) if not output_dir_is_s3: - c['pipeline_setup']['log_directory']['path'] = os.path.join( - output_dir, "log") + c["pipeline_setup"]["log_directory"]["path"] = os.path.join( + output_dir, "log" + ) else: - c['pipeline_setup']['log_directory']['path'] = os.path.join( - DEFAULT_TMP_DIR, "log") + 
c["pipeline_setup"]["log_directory"]["path"] = os.path.join( + DEFAULT_TMP_DIR, "log" + ) if args.mem_gb: - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'] = float(args.mem_gb) + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] = ( + float(args.mem_gb) + ) elif args.mem_mb: - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'] = float(args.mem_mb) / 1024.0 + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] = ( + float(args.mem_mb) / 1024.0 + ) else: try: - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant'] = float( - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant']) + c[ + "pipeline_setup", "system_config", "maximum_memory_per_participant" + ] = float( + c[ + "pipeline_setup", + "system_config", + "maximum_memory_per_participant", + ] + ) except KeyError: - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant'] = 6.0 + c[ + "pipeline_setup", "system_config", "maximum_memory_per_participant" + ] = 6.0 # Preference: n_cpus if given, override if present, else from config if # present, else n_cpus=3 if int(args.n_cpus) == 0: try: - args.n_cpus = c['pipeline_setup', 'system_config', - 'max_cores_per_participant'] + args.n_cpus = c[ + "pipeline_setup", "system_config", "max_cores_per_participant" + ] except KeyError: args.n_cpus = 3 - c['pipeline_setup', 'system_config', - 'max_cores_per_participant'] = int(args.n_cpus) + c["pipeline_setup", "system_config", "max_cores_per_participant"] = int( + args.n_cpus + ) - c['pipeline_setup']['system_config']['num_participants_at_once'] = int( - c['pipeline_setup']['system_config'].get( - 'num_participants_at_once', 1)) + c["pipeline_setup"]["system_config"]["num_participants_at_once"] = int( + c["pipeline_setup"]["system_config"].get("num_participants_at_once", 1) + ) # Reduce cores per participant if cores times participants is more than # available CPUS. n_cpus is a hard upper limit. 
if ( - c['pipeline_setup']['system_config']['max_cores_per_participant'] * - c['pipeline_setup']['system_config']['num_participants_at_once'] + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + * c["pipeline_setup"]["system_config"]["num_participants_at_once"] ) > int(args.n_cpus): - c['pipeline_setup']['system_config'][ - 'max_cores_per_participant' - ] = int(args.n_cpus) // c['pipeline_setup']['system_config'][ - 'num_participants_at_once' - ] - if c['pipeline_setup']['system_config'][ - 'max_cores_per_participant' - ] == 0: - c['pipeline_setup']['system_config'][ - 'max_cores_per_participant'] = args.n_cpus - c['pipeline_setup']['system_config'][ - 'num_participants_at_once'] = 1 + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] = ( + int(args.n_cpus) + // c["pipeline_setup"]["system_config"]["num_participants_at_once"] + ) + if c["pipeline_setup"]["system_config"]["max_cores_per_participant"] == 0: + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] = ( + args.n_cpus + ) + c["pipeline_setup"]["system_config"]["num_participants_at_once"] = 1 if int(args.num_ants_threads) == 0: try: - args.num_ants_threads = c['pipeline_setup', 'system_config', - 'num_ants_threads'] + args.num_ants_threads = c[ + "pipeline_setup", "system_config", "num_ants_threads" + ] except KeyError: args.num_ants_threads = 3 - c['pipeline_setup', 'system_config', 'num_ants_threads'] = int( - args.num_ants_threads) + c["pipeline_setup", "system_config", "num_ants_threads"] = int( + args.num_ants_threads + ) - c['pipeline_setup']['system_config']['num_ants_threads'] = min( - c['pipeline_setup']['system_config']['max_cores_per_participant'], - int(c['pipeline_setup']['system_config']['num_ants_threads']) + c["pipeline_setup"]["system_config"]["num_ants_threads"] = min( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"], + int(c["pipeline_setup"]["system_config"]["num_ants_threads"]), ) if args.random_seed: - c['pipeline_setup']['system_config']['random_seed'] = \ - args.random_seed + c["pipeline_setup"]["system_config"]["random_seed"] = args.random_seed - if c['pipeline_setup']['system_config']['random_seed'] is not None: - c['pipeline_setup']['system_config']['random_seed'] = \ - set_up_random_state(c['pipeline_setup']['system_config'][ - 'random_seed']) + if c["pipeline_setup"]["system_config"]["random_seed"] is not None: + c["pipeline_setup"]["system_config"]["random_seed"] = set_up_random_state( + c["pipeline_setup"]["system_config"]["random_seed"] + ) if args.runtime_usage is not None: - c['pipeline_setup']['system_config']['observed_usage'][ - 'callback_log'] = args.runtime_usage + c["pipeline_setup"]["system_config"]["observed_usage"]["callback_log"] = ( + args.runtime_usage + ) if args.runtime_buffer is not None: - c['pipeline_setup']['system_config']['observed_usage'][ - 'buffer'] = args.runtime_buffer + c["pipeline_setup"]["system_config"]["observed_usage"]["buffer"] = ( + args.runtime_buffer + ) if args.save_working_dir is not False: - c['pipeline_setup']['working_directory'][ - 'remove_working_dir'] = False + c["pipeline_setup"]["working_directory"]["remove_working_dir"] = False if isinstance(args.save_working_dir, str): - c['pipeline_setup']['working_directory']['path'] = \ - os.path.abspath(args.save_working_dir) + c["pipeline_setup"]["working_directory"]["path"] = os.path.abspath( + args.save_working_dir + ) elif not output_dir_is_s3: - c['pipeline_setup']['working_directory']['path'] = \ - os.path.join(output_dir, "working") + 
c["pipeline_setup"]["working_directory"]["path"] = os.path.join( + output_dir, "working" + ) else: - logger.warning('Cannot write working directory to S3 bucket. ' - 'Either change the output directory to something ' - 'local or turn off the --save_working_dir flag') + logger.warning( + "Cannot write working directory to S3 bucket. " + "Either change the output directory to something " + "local or turn off the --save_working_dir flag" + ) if args.fail_fast is not None: - c['pipeline_setup', 'system_config', - 'fail_fast'] = str_to_bool1_1(args.fail_fast) + c["pipeline_setup", "system_config", "fail_fast"] = str_to_bool1_1( + args.fail_fast + ) - if c['pipeline_setup']['output_directory']['quality_control'][ - 'generate_xcpqc_files']: - c['functional_preproc']['motion_estimates_and_correction'][ - 'motion_estimates']['calculate_motion_first'] = True - c['functional_preproc']['motion_estimates_and_correction'][ - 'motion_estimates']['calculate_motion_after'] = True + if c["pipeline_setup"]["output_directory"]["quality_control"][ + "generate_xcpqc_files" + ]: + c["functional_preproc"]["motion_estimates_and_correction"][ + "motion_estimates" + ]["calculate_motion_first"] = True + c["functional_preproc"]["motion_estimates_and_correction"][ + "motion_estimates" + ]["calculate_motion_after"] = True if args.participant_label: print( - "#### Running C-PAC for {0}" - .format(", ".join(args.participant_label)) + "#### Running C-PAC for {0}".format(", ".join(args.participant_label)) ) else: print("#### Running C-PAC") - print("Number of participants to run in parallel: {0}" - .format(c['pipeline_setup']['system_config'][ - 'num_participants_at_once'])) + print( + "Number of participants to run in parallel: {0}".format( + c["pipeline_setup"]["system_config"]["num_participants_at_once"] + ) + ) if args.freesurfer_dir: - c['pipeline_setup']['freesurfer_dir'] = args.freesurfer_dir - + c["pipeline_setup"]["freesurfer_dir"] = args.freesurfer_dir + if not args.data_config_file: - print("Input directory: {0}".format(bids_dir)) - - print("Output directory: {0}".format( - c['pipeline_setup']['output_directory']['path'])) - print("Working directory: {0}".format( - c['pipeline_setup']['working_directory']['path'])) - print("Log directory: {0}".format( - c['pipeline_setup']['log_directory']['path'])) - print("Remove working directory: {0}".format( - c['pipeline_setup']['working_directory']['remove_working_dir'])) - print("Available memory: {0} (GB)".format( - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'])) - print("Available threads: {0}".format( - c['pipeline_setup']['system_config']['max_cores_per_participant'])) - print("Number of threads for ANTs: {0}".format( - c['pipeline_setup']['system_config']['num_ants_threads'])) + print(f"Input directory: {bids_dir}") + + print( + "Output directory: {0}".format( + c["pipeline_setup"]["output_directory"]["path"] + ) + ) + print( + "Working directory: {0}".format( + c["pipeline_setup"]["working_directory"]["path"] + ) + ) + print("Log directory: {0}".format(c["pipeline_setup"]["log_directory"]["path"])) + print( + "Remove working directory: {0}".format( + c["pipeline_setup"]["working_directory"]["remove_working_dir"] + ) + ) + print( + "Available memory: {0} (GB)".format( + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] + ) + ) + print( + "Available threads: {0}".format( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + ) + ) + print( + "Number of threads for ANTs: {0}".format( + 
c["pipeline_setup"]["system_config"]["num_ants_threads"] + ) + ) # create a timestamp for writing config files # pylint: disable=invalid-name - st = datetime.datetime.now().strftime('%Y-%m-%dT%H-%M-%SZ') + st = datetime.datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ") if args.participant_label: args.participant_label = cl_strip_brackets(args.participant_label) args.participant_label = [ - 'sub-' + pt if not pt.startswith('sub-') else pt + "sub-" + pt if not pt.startswith("sub-") else pt for pt in args.participant_label ] # otherwise we move on to conforming the data configuration if not args.data_config_file: - sub_list = create_cpac_data_config(bids_dir, - args.participant_label, - args.aws_input_creds, - args.skip_bids_validator, - only_one_anat=False) + sub_list = create_cpac_data_config( + bids_dir, + args.participant_label, + args.aws_input_creds, + args.skip_bids_validator, + only_one_anat=False, + ) else: - sub_list = load_cpac_data_config(args.data_config_file, - args.participant_label, - args.aws_input_creds) + sub_list = load_cpac_data_config( + args.data_config_file, args.participant_label, args.aws_input_creds + ) prefilter = list(sub_list) - sub_list = sub_list_filter_by_labels(sub_list, - {'T1w': args.T1w_label, - 'bold': args.bold_label}) + sub_list = sub_list_filter_by_labels( + sub_list, {"T1w": args.T1w_label, "bold": args.bold_label} + ) # C-PAC only handles single anatomical images (for now) # so we take just the first as a string if we have a list for i, sub in enumerate(sub_list): - if isinstance(sub.get('anat'), dict): - for anat_key in sub['anat']: - if( - isinstance(sub['anat'][anat_key], list) and - len(sub['anat'][anat_key]) + if isinstance(sub.get("anat"), dict): + for anat_key in sub["anat"]: + if isinstance(sub["anat"][anat_key], list) and len( + sub["anat"][anat_key] ): - sub_list[i]['anat'][ - anat_key] = sub['anat'][anat_key][0] - if isinstance(sub.get('anat'), list) and len(sub['anat']): - sub_list[i]['anat'] = sub['anat'][0] + sub_list[i]["anat"][anat_key] = sub["anat"][anat_key][0] + if isinstance(sub.get("anat"), list) and len(sub["anat"]): + sub_list[i]["anat"] = sub["anat"][0] if args.participant_ndx is not None: - participant_ndx = int(args.participant_ndx) if participant_ndx == -1: - args.participant_ndx = os.environ['AWS_BATCH_JOB_ARRAY_INDEX'] + args.participant_ndx = os.environ["AWS_BATCH_JOB_ARRAY_INDEX"] if 0 <= participant_ndx < len(sub_list): - print('Processing data for participant {0} ({1})'.format( - args.participant_ndx, - sub_list[participant_ndx]["subject_id"] - )) + print( + "Processing data for participant {0} ({1})".format( + args.participant_ndx, sub_list[participant_ndx]["subject_id"] + ) + ) sub_list = [sub_list[participant_ndx]] data_hash = hash_data_config(sub_list) - data_config_file = (f"cpac_data_config_{data_hash}_idx-" - f"{args.participant_ndx}_{st}.yml") + data_config_file = ( + f"cpac_data_config_{data_hash}_idx-" + f"{args.participant_ndx}_{st}.yml" + ) else: - print("Participant ndx {0} is out of bounds [0, {1})".format( - participant_ndx, - str(len(sub_list)) - )) + print( + f"Participant ndx {participant_ndx} is out of bounds [0, {len(sub_list)!s})" + ) sys.exit(1) else: data_hash = hash_data_config(sub_list) - data_config_file = (f"cpac_data_config_{data_hash}_{st}.yml") + data_config_file = f"cpac_data_config_{data_hash}_{st}.yml" sublogdirs = [set_subject(sub, c)[2] for sub in sub_list] # write out the data configuration file data_config_file = os.path.join(sublogdirs[0], data_config_file) - with 
open(data_config_file, 'w', encoding='utf-8') as _f: + with open(data_config_file, "w", encoding="utf-8") as _f: noalias_dumper = yaml.dumper.SafeDumper noalias_dumper.ignore_aliases = lambda self, data: True - yaml.dump(sub_list, _f, default_flow_style=False, - Dumper=noalias_dumper) + yaml.dump(sub_list, _f, default_flow_style=False, Dumper=noalias_dumper) # update and write out pipeline config file pipeline_config_file = os.path.join( - sublogdirs[0], f"cpac_pipeline_config_{data_hash}_{st}.yml") - with open(pipeline_config_file, 'w', encoding='utf-8') as _f: + sublogdirs[0], f"cpac_pipeline_config_{data_hash}_{st}.yml" + ) + with open(pipeline_config_file, "w", encoding="utf-8") as _f: _f.write(create_yaml_from_template(c)) - minimized_config = f'{pipeline_config_file[:-4]}_min.yml' - with open(minimized_config, 'w', encoding='utf-8') as _f: - _f.write(create_yaml_from_template(c, import_from='blank')) - for config_file in (data_config_file, pipeline_config_file, - minimized_config): + minimized_config = f"{pipeline_config_file[:-4]}_min.yml" + with open(minimized_config, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c, import_from="blank")) + for config_file in (data_config_file, pipeline_config_file, minimized_config): os.chmod(config_file, 0o444) # Make config files readonly if len(sublogdirs) > 1: @@ -760,56 +886,68 @@ def run_main(): # file, an identical copy of the data and pipeline config # will be included in the log directory for each run for sublogdir in sublogdirs[1:]: - for config_file in (data_config_file, pipeline_config_file, - minimized_config): + for config_file in ( + data_config_file, + pipeline_config_file, + minimized_config, + ): try: - os.link(config_file, config_file.replace( - sublogdirs[0], sublogdir)) + os.link( + config_file, config_file.replace(sublogdirs[0], sublogdir) + ) except FileExistsError: pass if args.analysis_level in ["participant", "test_config"]: # build pipeline easy way - from CPAC.utils.monitoring import monitor_server import CPAC.pipeline.cpac_runner + from CPAC.utils.monitoring import monitor_server monitoring = None if args.monitoring: try: monitoring = monitor_server( - c['pipeline_setup']['pipeline_name'], - c['pipeline_setup']['log_directory']['path'] + c["pipeline_setup"]["pipeline_name"], + c["pipeline_setup"]["log_directory"]["path"], ) except: pass plugin_args = { - 'n_procs': int(c['pipeline_setup']['system_config'][ - 'max_cores_per_participant']), - 'memory_gb': int(c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant']), - 'raise_insufficient': c['pipeline_setup']['system_config'][ - 'raise_insufficient'], - 'status_callback': log_nodes_cb + "n_procs": int( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + ), + "memory_gb": int( + c["pipeline_setup"]["system_config"][ + "maximum_memory_per_participant" + ] + ), + "raise_insufficient": c["pipeline_setup"]["system_config"][ + "raise_insufficient" + ], + "status_callback": log_nodes_cb, } - if c['pipeline_setup']['system_config']['observed_usage'][ - 'callback_log'] is not None: - plugin_args['runtime'] = { - 'usage': c['pipeline_setup']['system_config'][ - 'observed_usage']['callback_log'], - 'buffer': c['pipeline_setup']['system_config'][ - 'observed_usage']['buffer']} + if ( + c["pipeline_setup"]["system_config"]["observed_usage"]["callback_log"] + is not None + ): + plugin_args["runtime"] = { + "usage": c["pipeline_setup"]["system_config"]["observed_usage"][ + "callback_log" + ], + "buffer": 
c["pipeline_setup"]["system_config"]["observed_usage"][ + "buffer" + ], + } print("Starting participant level processing") exitcode = CPAC.pipeline.cpac_runner.run( data_config_file, pipeline_config_file, - plugin='MultiProc' if plugin_args[ - 'n_procs' - ] > 1 else 'Linear', + plugin="MultiProc" if plugin_args["n_procs"] > 1 else "Linear", plugin_args=plugin_args, tracking=not args.tracking_opt_out, - test_config=args.analysis_level == "test_config" + test_config=args.analysis_level == "test_config", ) if monitoring: @@ -818,24 +956,26 @@ def run_main(): if args.analysis_level == "test_config": if exitcode == 0: logger.info( - '\nPipeline and data configuration files should' - ' have been written to %s and %s respectively.\n', - pipeline_config_file, data_config_file) + "\nPipeline and data configuration files should" + " have been written to %s and %s respectively.\n", + pipeline_config_file, + data_config_file, + ) # wait to import `LOGTAIL` here so it has any runtime updates from CPAC.utils.monitoring import LOGTAIL - for warning in LOGTAIL['warnings']: - logger.warning('%s\n', warning.rstrip()) + + for warning in LOGTAIL["warnings"]: + logger.warning("%s\n", warning.rstrip()) sys.exit(exitcode) -if __name__ == '__main__': +if __name__ == "__main__": try: run_main() except Exception as exception: # if we hit an exception before the pipeline starts to build but # we're still able to create a logfile, log the error in the file - failed_to_start(sys.argv[2] if len(sys.argv) > 2 else os.getcwd(), - exception) + failed_to_start(sys.argv[2] if len(sys.argv) > 2 else os.getcwd(), exception) raise exception From eef356908b9bd1da4bd35dbfb76ef045d289c590 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 21:59:19 -0500 Subject: [PATCH 136/507] =?UTF-8?q?=F0=9F=9A=9A=20Move=20entrypoint=20scri?= =?UTF-8?q?pts=20into=20CPAC/=5Fentrypoints=20in=20preparation=20for=20bac?= =?UTF-8?q?kmerge?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Squashed commit of the following: commit 9ea9b068c316de0fd2f945b18faa766f95adfa19 Author: Jon Clucas Date: Mon Nov 4 21:57:33 2024 -0500 🚚 Move entrypoint scripts into CPAC/_entrypoints Ref 38a0d104521f1ef0de8c4cf4c03b8e445c99ff94 --- {dev/docker_data => CPAC/_entrypoints}/run-with-freesurfer.sh | 2 +- {dev/docker_data => CPAC/_entrypoints}/run.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename {dev/docker_data => CPAC/_entrypoints}/run-with-freesurfer.sh (97%) rename {dev/docker_data => CPAC/_entrypoints}/run.py (100%) diff --git a/dev/docker_data/run-with-freesurfer.sh b/CPAC/_entrypoints/run-with-freesurfer.sh similarity index 97% rename from dev/docker_data/run-with-freesurfer.sh rename to CPAC/_entrypoints/run-with-freesurfer.sh index 440c6a47bb..b1551b4512 100755 --- a/dev/docker_data/run-with-freesurfer.sh +++ b/CPAC/_entrypoints/run-with-freesurfer.sh @@ -9,4 +9,4 @@ # You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . 
source $FREESURFER_HOME/SetUpFreeSurfer.sh -/code/run.py "$@" \ No newline at end of file +/code/run.py "$@" diff --git a/dev/docker_data/run.py b/CPAC/_entrypoints/run.py similarity index 100% rename from dev/docker_data/run.py rename to CPAC/_entrypoints/run.py From dc5663cba24aa344dc2745112bdf01516e332b71 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 23:07:19 -0500 Subject: [PATCH 137/507] :twisted_rightwards_arrows: Merge changes from branch 'feature/check_orientations' into 'many_pipelines' --- CHANGELOG.md | 47 +- CPAC/anat_preproc/anat_preproc.py | 93 ++-- CPAC/anat_preproc/lesion_preproc.py | 34 +- CPAC/func_preproc/func_preproc.py | 177 ++++++-- .../longitudinal_workflow.py | 40 +- CPAC/pipeline/engine.py | 428 +++++++++--------- CPAC/pipeline/schema.py | 56 ++- CPAC/pipeline/test/test_engine.py | 2 +- CPAC/registration/registration.py | 383 +++++++++------- CPAC/registration/tests/mocks.py | 4 +- CPAC/registration/tests/test_registration.py | 5 +- .../configs/pipeline_config_blank.yml | 3 + CPAC/resources/tests/test_templates.py | 13 +- CPAC/utils/datasource.py | 270 +++++------ CPAC/utils/test_mocks.py | 12 +- 15 files changed, 909 insertions(+), 658 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8a23221ea..be5ec4a432 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,14 +18,54 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- `pyproject.toml` file with `[build-system]` defined. +- [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). +- `desired_orientation` key in participant-level pipeline config under `pipeline_setup`. +- Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. +- Required positional parameter "orientation" to `resolve_resolution`. +- Optional positional argument "cfg" to `create_lesion_preproc`. + +### Changed + +- Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. +- Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. +- Made orientation configurable (was hard-coded as "RPI"). + +### Fixed + +- A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs). +- Restored `bids-validator` functionality. +- Fixed empty `shell` variable in cluster run scripts. +- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. + +### Removed + +- Variant image recipes. + - `ABCD-HCP` + - `fMRIPrep-LTS` +- Typehinting support for Python < 3.10. + +## [1.8.7] - 2024-05-03 + +### Added + - `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. -- Ability to throttle nodes, estimating all available memory when threading +- Ability to throttle nodes, estimating all available memory when threading. +- Ability to configure FreeSurfer ingress from the command line. ### Changed +- The ABCD-pipeline based surface post-processing workflows have been modularized to be more robust, resolving a running issue with this part of the pipeline stalling or crashing in some runs. 
- Moved autoversioning from CI to pre-commit - Updated `FSL-BET` config to default `-mask-boolean` flag as on, and removed all removed `mask-boolean` keys from configs. -- Added `dvars` as optional output in `cpac_outputs` +- Added `dvars` as optional output in `cpac_outputs`. + +### Fixed + +- Fixed a bug where ingressing fmriprep outputs into C-PAC with a blank nuisance confounds field in the C-PAC pipeline configuration file would cause a crash. +- Fixed a bug where spatial smoothing and z-scoring of final outputs would sometimes fail to run when running a C-PAC pipeline that would ingress fmriprep outputs. +- Fixed a bug where ingress of distortion correction-related field map metadata would sometimes fail to recognize both echo times, when there were two present, leading to an error message claiming an echo time is missing. +- Changed an extraneous default pipeline configuration setting - `surface_connectivity` is now disabled in the default configuration as intended. ## [1.8.6] - 2024-01-15 @@ -285,7 +325,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1. -[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.6...develop +[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.7...develop +[1.8.7]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.7 [1.8.6]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.6 [1.8.5]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.5 [1.8.4]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.4 diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index b37aebe003..a561f8e077 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -# from copy import deepcopy import os from nipype.interfaces import afni, ants, freesurfer, fsl @@ -36,6 +35,7 @@ ) from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge @@ -86,7 +86,7 @@ def acpc_alignment( elif config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": # robustfov doesn't work on some monkey data. prefer using flirt. 
# ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz - # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 + # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 flirt_fov = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_1_fov") flirt_fov.inputs.args = "-applyxfm" @@ -138,7 +138,7 @@ def acpc_alignment( aff_to_rig_imports = ["import os", "from numpy import *"] aff_to_rig = pe.Node( - util.Function( + Function( input_names=["in_xfm", "out_name"], output_names=["out_mat"], function=fsl_aff_to_rigid, @@ -198,7 +198,7 @@ def acpc_alignment( def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): # Adapted from DCAN lab - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/T2wToT1wReg.sh + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/T2wToT1wReg.sh preproc = pe.Workflow(name=wf_name) @@ -240,7 +240,7 @@ def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name="biasfield_correction_t1t2"): # Adapted from DCAN lab - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh preproc = pe.Workflow(name=wf_name) @@ -319,7 +319,7 @@ def T1wmulT2w_brain_norm_s_string(sigma, in_file): return "-s %f -div %s" % (sigma, in_file) T1wmulT2w_brain_norm_s_string = pe.Node( - util.Function( + Function( input_names=["sigma", "in_file"], output_names=["out_str"], function=T1wmulT2w_brain_norm_s_string, @@ -378,7 +378,7 @@ def form_lower_string(mean, std): return "-thr %s -bin -ero -mul 255" % (lower) form_lower_string = pe.Node( - util.Function( + Function( input_names=["mean", "std"], output_names=["out_str"], function=form_lower_string, @@ -441,10 +441,10 @@ def form_lower_string(mean, std): # 6. Use bias field output to create corrected images def file_to_a_list(infile_1, infile_2): - return list([infile_1, infile_2]) + return [infile_1, infile_2] file_to_a_list = pe.Node( - util.Function( + Function( input_names=["infile_1", "infile_2"], output_names=["out_list"], function=file_to_a_list, @@ -544,7 +544,7 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ) skullstrip_args = pe.Node( - util.Function( + Function( input_names=[ "spat_norm", "spat_norm_dxyz", @@ -762,7 +762,7 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): anat_robustfov.inputs.output_type = "NIFTI_GZ" anat_pad_RobustFOV_cropped = pe.Node( - util.Function( + Function( input_names=["cropped_image_path", "target_image_path"], output_names=["padded_image_path"], function=pad, @@ -891,8 +891,9 @@ def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): """ - UNet - options (following numbers are default): + UNet options. 
+
+    The following numbers are the defaults:
     input_slice: 3
     conv_block: 5
     kernel_root: 16
@@ -901,7 +902,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
     from CPAC.unet.function import predict_volumes
 
     unet_mask = pe.Node(
-        util.Function(
+        Function(
            input_names=["model_path", "cimg_in"],
            output_names=["out_path"],
            function=predict_volumes,
@@ -939,9 +940,9 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
 
     wf.connect(unet_mask, "out_path", unet_masked_brain, "operand_files")
 
-    # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
+    # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
     native_brain_to_template_brain = pe.Node(
-        interface=fsl.FLIRT(), name=f"native_brain_to_template_" f"brain_{pipe_num}"
+        interface=fsl.FLIRT(), name=f"native_brain_to_template_brain_{pipe_num}"
     )
     native_brain_to_template_brain.inputs.dof = 6
     native_brain_to_template_brain.inputs.interp = "sinc"
@@ -950,9 +951,9 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
     node, out = strat_pool.get_data("T1w-brain-template")
     wf.connect(node, out, native_brain_to_template_brain, "reference")
 
-    # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
+    # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
     native_head_to_template_head = pe.Node(
-        interface=fsl.FLIRT(), name=f"native_head_to_template_" f"head_{pipe_num}"
+        interface=fsl.FLIRT(), name=f"native_head_to_template_head_{pipe_num}"
     )
     native_head_to_template_head.inputs.apply_xfm = True
@@ -974,7 +975,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
     node, out = strat_pool.get_data("T1w-template")
     wf.connect(node, out, native_head_to_template_head, "reference")
 
-    # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
+    # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
     template_brain_mask = pe.Node(
         interface=fsl.maths.MathsCommand(), name=f"template_brain_mask_{pipe_num}"
     )
@@ -985,7 +986,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
 
     # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
     ants_template_head_to_template = pe.Node(
-        interface=ants.Registration(), name=f"template_head_to_" f"template_{pipe_num}"
+        interface=ants.Registration(), name=f"template_head_to_template_{pipe_num}"
     )
     ants_template_head_to_template.inputs.metric = ["CC"]
     ants_template_head_to_template.inputs.metric_weight = [1, 5]
@@ -1006,7 +1007,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
     node, out = strat_pool.get_data("T1w-brain-template")
     wf.connect(node, out, ants_template_head_to_template, "moving_image")
 
-    # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
+    # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
     template_head_transform_to_template = pe.Node(
         interface=ants.ApplyTransforms(),
         name=f"template_head_transform_to_template_{pipe_num}",
@@ -1032,14 +1033,14 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
         "transforms",
     )
 
-    # convert_xfm -omat 
brain_rot2native.mat -inverse brain_rot2atl.mat invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True wf.connect(native_brain_to_template_brain, "out_matrix_file", invt, "in_file") # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat template_brain_to_native_brain = pe.Node( - interface=fsl.FLIRT(), name=f"template_brain_to_native_" f"brain_{pipe_num}" + interface=fsl.FLIRT(), name=f"template_brain_to_native_brain_{pipe_num}" ) template_brain_to_native_brain.inputs.apply_xfm = True wf.connect( @@ -1054,9 +1055,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(invt, "out_file", template_brain_to_native_brain, "in_matrix_file") # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz - refined_mask = pe.Node( - interface=fsl.Threshold(), name=f"refined_mask" f"_{pipe_num}" - ) + refined_mask = pe.Node(interface=fsl.Threshold(), name=f"refined_mask_{pipe_num}") refined_mask.inputs.thresh = 0.5 refined_mask.inputs.args = "-bin" wf.connect(template_brain_to_native_brain, "out_file", refined_mask, "in_file") @@ -1084,7 +1083,7 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # convert brain mask file from .mgz to .nii.gz fs_brain_mask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_brainmask_to_nifti_{pipe_num}", @@ -1115,12 +1114,12 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): """ - ABCD harmonization - anatomical brain mask generation + ABCD harmonization - anatomical brain mask generation. - Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 + Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 """ wmparc_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file", "reslice_like", "args"], output_names=["out_file"], function=mri_convert, @@ -1131,7 +1130,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Register wmparc file if ingressing FreeSurfer data if strat_pool.check_rpool("pipeline-fs_xfm"): wmparc_to_native = pe.Node( - util.Function( + Function( input_names=["source_file", "target_file", "xfm", "out_file"], output_names=["transformed_file"], function=normalize_wmparc, @@ -1169,7 +1168,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") wb_command_fill_holes = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=wb_command ), name=f"wb_command_fill_holes_{pipe_num}", @@ -1207,7 +1206,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz convert_fs_brainmask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_brainmask_to_nifti_{node_id}", @@ -1218,7 +1217,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz convert_fs_T1_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), 
name=f"convert_fs_T1_to_nifti_{node_id}", @@ -1234,7 +1233,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_brainmask.inputs.orientation = "RPI" + reorient_fs_brainmask.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ" wf.connect( @@ -1256,7 +1255,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_T1.inputs.orientation = "RPI" + reorient_fs_T1.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_T1.inputs.outputtype = "NIFTI_GZ" wf.connect(convert_fs_T1_to_nifti, "out_file", reorient_fs_T1, "in_file") @@ -1385,7 +1384,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def mask_T2(wf_name="mask_T2"): # create T2 mask based on T1 mask - # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreliminaryMasking/macaque_masking.py + # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/83512b0/PreliminaryMasking/macaque_masking.py preproc = pe.Workflow(name=wf_name) @@ -1461,7 +1460,7 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - anat_reorient.inputs.orientation = "RPI" + anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] anat_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") @@ -2269,7 +2268,7 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - T2_reorient.inputs.orientation = "RPI" + T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] T2_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") @@ -2829,7 +2828,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): ) ### ABCD Harmonization ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/FreeSurfer/FreeSurferPipeline.sh#L140-L144 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9a02c47/FreeSurfer/FreeSurferPipeline.sh#L140-L144 # flirt -interp spline -in "$T1wImage" -ref "$T1wImage" -applyisoxfm 1 -out "$T1wImageFile"_1mm.nii.gz resample_head_1mm = pe.Node( @@ -2889,7 +2888,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz normalize_head = pe.Node( - util.Function( + Function( input_names=["in_file", "number", "out_file_suffix"], output_names=["out_file"], function=fslmaths_command, @@ -2980,7 +2979,7 @@ def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extraction"): ### ABCD Harmonization - FNIRT-based brain extraction ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/4d9996b/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh preproc = pe.Workflow(name=wf_name) @@ -3101,7 +3100,7 @@ def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extract def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction"): ### ABCD Harmonization - FAST bias field correction ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 + # 
Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9291324/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 preproc = pe.Workflow(name=wf_name) @@ -3186,7 +3185,7 @@ def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction" ) def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ### ABCD Harmonization - Myelin Map ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9291324/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 # fslmerge -t ${T1wFolder}/xfms/${T1wImage}_dc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc merge_t1_acpc_to_list = pe.Node( util.Merge(3), name=f"merge_t1_acpc_to_list_{pipe_num}" @@ -3212,7 +3211,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_t1_acpc, "merged_file", multiply_t1_acpc_by_zero, "in_file") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 # convertwarp --relout --rel --ref="$T1wFolder"/"$T1wImageBrainMask" --premat="$T1wFolder"/xfms/"$InitialT1wTransform" \ # --warp1="$T1wFolder"/xfms/"$dcT1wTransform" --out="$T1wFolder"/xfms/"$OutputOrigT1wToT1w" convertwarp_orig_t1_to_t1 = pe.Node( @@ -3229,7 +3228,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(node, out, convertwarp_orig_t1_to_t1, "premat") wf.connect(multiply_t1_acpc_by_zero, "out_file", convertwarp_orig_t1_to_t1, "warp1") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" applywarp_biasfield = pe.Node( interface=fsl.ApplyWarp(), name=f"applywarp_biasfield_{pipe_num}" @@ -3255,7 +3254,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None threshold_biasfield.inputs.op_string = "-thr 0.1" wf.connect(applywarp_biasfield, "out_file", threshold_biasfield, "in_file") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f"applywarp_t1_{pipe_num}") diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 2ef58c3d2a..21628c97f0 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2019-2023 C-PAC Developers +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
 from nipype.interfaces import afni
 import nipype.interfaces.utility as util
 
 from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.utils.interfaces import Function
 
 
 def inverse_lesion(lesion_path):
-    """
+    """Replace non-zeroes with zeroes and zeroes with ones.
+
     Check if the image contains more zeros than non-zeros, if so,
     replaces non-zeros by zeros and zeros by ones.
 
@@ -38,13 +55,12 @@ def inverse_lesion(lesion_path):
     nii = nu.inverse_nifti_values(image=lesion_path)
     nib.save(nii, lesion_out)
     return lesion_out
-    else:
-        return lesion_out
+    return lesion_out
 
 
-def create_lesion_preproc(wf_name="lesion_preproc"):
-    """
-    The main purpose of this workflow is to process lesions masks.
+def create_lesion_preproc(cfg=None, wf_name="lesion_preproc"):
+    """Process lesion masks.
+
     Lesion mask file is deobliqued and reoriented in the same way as the T1 in
     the anat_preproc function.
 
@@ -95,7 +111,7 @@ def create_lesion_preproc(wf_name="lesion_preproc"):
     lesion_deoblique.inputs.deoblique = True
 
     lesion_inverted = pe.Node(
-        interface=util.Function(
+        interface=Function(
             input_names=["lesion_path"],
             output_names=["lesion_out"],
             function=inverse_lesion,
@@ -117,7 +133,9 @@ def create_lesion_preproc(wf_name="lesion_preproc"):
         mem_x=(0.0115, "in_file", "t"),
     )
 
-    lesion_reorient.inputs.orientation = "RPI"
+    lesion_reorient.inputs.orientation = (
+        cfg.pipeline_setup["desired_orientation"] if cfg else "RPI"
+    )
     lesion_reorient.inputs.outputtype = "NIFTI_GZ"
 
     preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file")
diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index fef6a01024..2340333ca6 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -14,18 +14,16 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-"""Functional preprocessing""" +"""Functional preprocessing.""" # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position -from nipype import logging from nipype.interfaces import afni, ants, fsl, utility as util - -logger = logging.getLogger("nipype.workflow") from nipype.interfaces.afni import preprocess, utils as afni_utils from CPAC.func_preproc.utils import nullify from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.ants import ( AI, # niworkflows PrintHeader, @@ -35,6 +33,7 @@ def collect_arguments(*args): + """Collect arguments.""" command_args = [] if args[0]: command_args += [args[1]] @@ -43,6 +42,7 @@ def collect_arguments(*args): def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): + """Generate an anatomically refined mask.""" wf = pe.Workflow(name=wf_name) input_node = pe.Node( @@ -77,7 +77,7 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf.connect(func_single_volume, "out_file", func_tmp_brain, "in_file_a") # 2.1 get a tmp func brain mask - if init_bold_mask == True: + if init_bold_mask: # 2.1.1 N4BiasFieldCorrection single volume of raw_func func_single_volume_n4_corrected = pe.Node( interface=ants.N4BiasFieldCorrection( @@ -165,7 +165,7 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf.connect(reg_anat_mask_to_func, "out_file", func_mask, "operand_files") - if init_bold_mask == True: + if init_bold_mask: wf.connect(func_tmp_brain_mask_dil, "out_file", func_mask, "in_file") else: wf.connect(input_node, "init_func_brain_mask", func_mask, "in_file") @@ -176,7 +176,10 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): def anat_based_mask(wf_name="bold_mask"): - """Reference `DCAN lab BOLD mask `_""" + """Generate a functional mask from anatomical data. + + Reference `DCAN lab BOLD mask `_. + """ wf = pe.Workflow(name=wf_name) input_node = pe.Node( @@ -341,7 +344,7 @@ def create_wf_edit_func(wf_name="edit_func"): # allocate a node to check that the requested edits are # reasonable given the data func_get_idx = pe.Node( - util.Function( + Function( input_names=["in_files", "stop_idx", "start_idx"], output_names=["stopidx", "startidx"], function=get_idx, @@ -379,6 +382,7 @@ def create_wf_edit_func(wf_name="edit_func"): def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): + """Calculate corrected slice-timing.""" # allocate a workflow object wf = pe.Workflow(name=name) @@ -443,11 +447,10 @@ def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): def get_idx(in_files, stop_idx=None, start_idx=None): - """ - Method to get the first and the last slice for - the functional run. It verifies the user specified - first and last slice. If the values are not valid, it - calculates and returns the very first and the last slice + """Get the first and the last slice for the functional run. + + Verify the user specified first and last slice. If the values are not valid, + calculate and return the very first and the last slice. 
Parameters
     ----------
@@ -480,12 +483,12 @@ def get_idx(in_files, stop_idx=None, start_idx=None):
     shape = hdr.get_data_shape()
 
     # Check to make sure the input file is 4-dimensional
-    if len(shape) != 4:
+    if len(shape) != 4:  # noqa: PLR2004
         raise TypeError("Input nifti file: %s is not a 4D file" % in_files)
 
     # Grab the number of volumes
     nvols = int(hdr.get_data_shape()[3])
 
-    if (start_idx == None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)):
+    if (start_idx is None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)):
         startidx = 0
     else:
         startidx = int(start_idx)
@@ -506,6 +509,7 @@ def get_idx(in_files, stop_idx=None, start_idx=None):
     outputs=["desc-preproc_bold", "desc-reorient_bold"],
 )
 def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Reorient functional timeseries."""
     func_deoblique = pe.Node(
         interface=afni_utils.Refit(),
         name=f"func_deoblique_{pipe_num}",
@@ -524,7 +528,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
         mem_x=(0.0115, "in_file", "t"),
     )
 
-    func_reorient.inputs.orientation = "RPI"
+    func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
     func_reorient.inputs.outputtype = "NIFTI_GZ"
 
     wf.connect(func_deoblique, "out_file", func_reorient, "in_file")
@@ -545,6 +549,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["desc-preproc_bold"],
 )
 def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Scale functional timeseries."""
     scale_func_wf = create_scale_func_wf(
         scaling_factor=cfg.scaling_factor, wf_name=f"scale_func_{pipe_num}"
     )
@@ -568,6 +573,7 @@ def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Truncate functional timeseries."""
     # if cfg.functional_preproc['truncation']['start_tr'] == 0 and \
     #     cfg.functional_preproc['truncation']['stop_tr'] == None:
     #     data, key = strat_pool.get_data("desc-preproc_bold",
@@ -603,6 +609,7 @@ def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_despike(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate de-spiked functional timeseries in native space with AFNI."""
     despike = pe.Node(
         interface=preprocess.Despike(),
         name=f"func_despiked_{pipe_num}",
@@ -645,6 +652,7 @@ def func_despike(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate de-spiked functional timeseries in template space with AFNI."""
     despike = pe.Node(
         interface=preprocess.Despike(),
         name=f"func_despiked_template_{pipe_num}",
@@ -699,8 +707,9 @@ def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate slice-time corrected timeseries."""
     slice_time = slice_timing_wf(
-        name="func_slice_timing_correction_" f"{pipe_num}",
+        name=f"func_slice_timing_correction_{pipe_num}",
         tpattern=cfg.functional_preproc["slice_timing_correction"]["tpattern"],
         tzero=cfg.functional_preproc["slice_timing_correction"]["tzero"],
     )
@@ -738,6 +747,7 @@ def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate a functional mask with AFNI."""
     func_get_brain_mask = pe.Node(
         interface=preprocess.Automask(), name=f"func_get_brain_mask_AFNI_{pipe_num}"
     )
@@ -763,6 +773,7 @@ def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["space-bold_desc-brain_mask"],
 )
 def bold_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate functional 
mask with FSL."""
     inputnode_bet = pe.Node(
         util.IdentityInterface(
             fields=[
@@ -867,7 +878,7 @@ def form_thr_string(thr):
         return "-thr %s" % (threshold_z)
 
     form_thr_string = pe.Node(
-        util.Function(
+        Function(
             input_names=["thr"],
             output_names=["out_str"],
             function=form_thr_string,
@@ -938,12 +949,105 @@ def form_thr_string(thr):
         "FSL-AFNI-brain-mask",
         "FSL-AFNI-brain-probseg",
     ],
-    outputs=["space-bold_desc-brain_mask", "desc-ref_bold"],
+    outputs={
+        "space-bold_desc-brain_mask": {
+            "Description": "mask of the skull-stripped input file"
+        },
+        "desc-ref_bold": {
+            "Description": "the ``bias_corrected_file`` after skull-stripping"
+        },
+    },
 )
 def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
-    """fMRIPrep-style BOLD mask
-    `Ref `_
+    """fMRIPrep-style BOLD mask.
+
+    Enhance and run brain extraction on a BOLD EPI image.
+
+    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependent)`
+    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
+    averaging non-steady-state timepoints), and sharpens the histogram
+    with the application of the N4 algorithm for removing the
+    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
+    mask.
+
+    Steps of this workflow are:
+
+    [1]. Binary dilation of the tentative mask with a sphere of 3mm diameter.
+    [2]. Run ANTs' ``N4BiasFieldCorrection`` on the input
+        :abbr:`BOLD (blood-oxygen level-dependent)` average, using the
+        mask generated in 1) instead of the internal Otsu thresholding.
+    [3]. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
+        dilation of one iteration and a sphere of 6mm as structuring element.
+    [4]. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
+        with the latest mask calculated in 3), then use AFNI's ``3dUnifize``
+        to *standardize* the T2* contrast distribution.
+    [5]. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
+        enhancement of 4).
+    [6]. Calculate a final mask as the intersection of 3) and 5).
+    [7]. Apply final mask on the enhanced reference.
+
+    `Ref `_.
     """
+    # STATEMENT OF CHANGES:
+    # This function is derived from sources licensed under the Apache-2.0 terms,
+    # and this function has been changed.

+    # CHANGES:
+    # * Converted from a plain function to a CPAC.pipeline.nodeblock.NodeBlockFunction
+    # * Removed Registration version check
+    # * Hardcoded Registration parameters instead of loading epi_atlasbased_brainmask.json
+    # * Uses C-PAC's ``FSL-AFNI-brain-probseg`` template in place of ``templateflow.api.get("MNI152NLin2009cAsym", resolution=1, label="brain", suffix="probseg")``
+    # * Replaced niworkflows.interfaces.nibabel.Binarize with fsl.maths.MathsCommand and hardcoded threshold
+    # * Replaced niworkflows.interfaces.images.MatchHeader with CPAC.utils.interfaces.ants.(PrintHeader and SetDirectionByMatrix)
+    # * Removed header fix for unifize
+    # * Removed header fix for skullstrip_second_pass
+    # * Removed ``if not pre_mask`` conditional block
+    # * Modified docstring to reflect local changes
+    # * Refactored some variables and connections and updated style to match C-PAC codebase

+    # ORIGINAL WORK'S ATTRIBUTION NOTICE:
+    # Copyright (c) 2016, the CRN developers team.
+    # All rights reserved.

+    # Redistribution and use in source and binary forms, with or without
+    # modification, are permitted provided that the following conditions are met:

+    # * Redistributions of source code must retain the above copyright notice, this
+    # list of conditions and the following disclaimer. 
+
+    # * Redistributions in binary form must reproduce the above copyright notice,
+    # this list of conditions and the following disclaimer in the documentation
+    # and/or other materials provided with the distribution.
+
+    # * Neither the name of niworkflows nor the names of its
+    # contributors may be used to endorse or promote products derived from
+    # this software without specific prior written permission.
+
+    # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+    # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+    # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+    # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+    # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+    # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+    # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+    # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    # Licensed under the Apache License, Version 2.0 (the "License");
+    # you may not use this file except in compliance with the License.
+    # You may obtain a copy of the License at
+
+    #     http://www.apache.org/licenses/LICENSE-2.0
+
+    # Unless required by applicable law or agreed to in writing, software
+    # distributed under the License is distributed on an "AS IS" BASIS,
+    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    # See the License for the specific language governing permissions and
+    # limitations under the License.
+
+    # Modifications copyright (C) 2021 - 2024 C-PAC Developers
+
     # Initialize transforms with antsAI
     init_aff = pe.Node(
         AI(
@@ -1040,6 +1144,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         n_procs=1,
     )
 
+    # Create a generous BET mask out of the bias-corrected EPI
     skullstrip_first_pass = pe.Node(
         fsl.BET(frac=0.2, mask=True, functional=False),
         name=f"skullstrip_first_pass_{pipe_num}",
@@ -1055,8 +1160,9 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         name=f"skullstrip_first_dilate_{pipe_num}",
     )
 
-    bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_" f"{pipe_num}")
+    bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}")
 
+    # Use AFNI's unifize for T2 contrast
     unifize = pe.Node(
         afni_utils.Unifize(
             t2=True,
@@ -1067,15 +1173,18 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         name=f"unifize_{pipe_num}",
     )
 
+    # Run AFNI's 3dAutomask to extract a refined brain mask
     skullstrip_second_pass = pe.Node(
         preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"),
         name=f"skullstrip_second_pass_{pipe_num}",
     )
 
+    # Take intersection of both masks
     combine_masks = pe.Node(
         fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}"
     )
 
+    # Compute masked brain
     apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}")
 
     node, out = strat_pool.get_data(["motion-basefile"])
@@ -1140,6 +1249,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["space-bold_desc-brain_mask"],
 )
 def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate the BOLD mask by basing it off of the refined anatomical brain mask."""
     # binarize anat mask, in case it is not a binary mask. 
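    # (Editorial illustration, not part of the original patch; filenames
    # hypothetical: the node below is equivalent to the shell call
    # ``fslmaths anat_mask.nii.gz -bin anat_mask_bin.nii.gz``, which maps
    # every nonzero voxel to 1.)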
anat_brain_mask_bin = pe.Node( interface=fsl.ImageMaths(), name=f"anat_brain_mask_bin_{pipe_num}" @@ -1180,7 +1290,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): mem_x=(0.0115, "in_file", "t"), ) - func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] func_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(func_deoblique, "out_file", func_reorient, "in_file") @@ -1206,7 +1316,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): # refined_bold_mask : input motion corrected func refined_bold_mask = anat_refined_mask( - init_bold_mask=False, wf_name="refined_bold_mask" f"_{pipe_num}" + init_bold_mask=False, wf_name=f"refined_bold_mask_{pipe_num}" ) node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) @@ -1284,7 +1394,8 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): ) def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): """Generate the BOLD mask by basing it off of the anatomical brain mask. - Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. + + Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. """ # 0. Take single volume of func func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") @@ -1452,7 +1563,7 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): "desc-preproc_bold", "T1w-template-funcreg", "space-template_desc-preproc_T1w", - "space-template_desc-brain_mask", + "space-template_desc-T1w_mask", ], outputs=[ "space-template_res-bold_desc-brain_T1w", @@ -1461,8 +1572,9 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): ], ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): - """Resample anatomical brain mask in standard space to get BOLD brain mask in standard space - Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. + """Resample anatomical brain mask to get BOLD brain mask in standard space. + + Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. """ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) @@ -1480,7 +1592,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num ) - node, out = strat_pool.get_data("space-template_desc-brain_mask") + node, out = strat_pool.get_data("space-template_desc-T1w_mask") wf.connect( node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" ) @@ -1544,7 +1656,8 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): ) def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): """Generate the BOLD mask by basing it off of the anatomical brain. - Adapted from `the BOLD mask method from the CCS pipeline `_. + + Adapted from `the BOLD mask method from the CCS pipeline `_. 
""" # Run 3dAutomask to generate func initial mask func_tmp_brain_mask = pe.Node( @@ -1680,6 +1793,7 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate a functional brain mask.""" func_edge_detect = pe.Node( interface=afni_utils.Calc(), name=f"func_extract_brain_{pipe_num}" ) @@ -1711,6 +1825,7 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-mean_bold"], ) def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate a mean functional image.""" func_mean = pe.Node(interface=afni_utils.TStat(), name=f"func_mean_{pipe_num}") func_mean.inputs.options = "-mean" @@ -1734,6 +1849,7 @@ def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_bold"], ) def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): + """Normalize a functional image.""" func_normalize = pe.Node( interface=fsl.ImageMaths(), name=f"func_normalize_{pipe_num}", @@ -1759,6 +1875,7 @@ def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-bold_desc-brain_mask"], ) def func_mask_normalize(wf, cfg, strat_pool, pipe_num, opt=None): + """Normalize a functional mask.""" func_mask_normalize = pe.Node( interface=fsl.ImageMaths(), name=f"func_mask_normalize_{pipe_num}", diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 9b2c389a09..5c989675c1 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2020-2022 C-PAC Developers +# Copyright (C) 2020-2024 C-PAC Developers # This file is part of C-PAC. @@ -17,7 +17,6 @@ # License along with C-PAC. If not, see . import os -from nipype import logging from nipype.interfaces import fsl import nipype.interfaces.io as nio from indi_aws import aws_utils @@ -47,8 +46,6 @@ from CPAC.utils.strategy import Strategy from CPAC.utils.utils import check_config_resources, check_prov_for_regtool -logger = logging.getLogger("nipype.workflow") - @nodeblock( name="mask_T1w_longitudinal_template", @@ -60,7 +57,7 @@ def mask_T1w_longitudinal_template(wf, cfg, strat_pool, pipe_num, opt=None): brain_mask = pe.Node( interface=fsl.maths.MathsCommand(), - name=f"longitudinal_anatomical_brain_mask_" f"{pipe_num}", + name=f"longitudinal_anatomical_brain_mask_{pipe_num}", ) brain_mask.inputs.args = "-bin" @@ -79,9 +76,8 @@ def create_datasink( session_id="", strat_name="", map_node_iterfield=None, -): +) -> pe.Node | pe.MapNode: """ - Parameters ---------- datasink_name @@ -90,15 +86,8 @@ def create_datasink( session_id strat_name map_node_iterfield - - Returns - ------- - """ - try: - encrypt_data = bool(config.pipeline_setup["Amazon-AWS"]["s3_encryption"]) - except: - encrypt_data = False + encrypt_data = config.pipeline_setup["Amazon-AWS"]["s3_encryption"] # TODO Enforce value with schema validation # Extract credentials path for output if it exists @@ -122,7 +111,8 @@ def create_datasink( ) if not s3_write_access: - raise Exception("Not able to write to bucket!") + msg = "Not able to write to bucket!" 
+ raise Exception(msg) except Exception as e: if ( @@ -360,7 +350,7 @@ def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): for label in labels: apply_xfm = apply_transform( - f"warp_longitudinal_seg_to_T1w_{label}_" f"{pipe_num}", + f"warp_longitudinal_seg_to_T1w_{label}_{pipe_num}", reg_tool, time_series=False, num_cpus=num_cpus, @@ -630,7 +620,7 @@ def anat_longitudinal_wf(subject_id, sub_list, config): ) rpool.set_data( - "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", select_sess, "warp_path", {}, @@ -722,9 +712,9 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): for sub_dict in sub_list: if "func" in sub_dict or "rest" in sub_dict: if "func" in sub_dict: - func_paths_dict = sub_dict["func"] + sub_dict["func"] else: - func_paths_dict = sub_dict["rest"] + sub_dict["rest"] unique_id = sub_dict["unique_id"] session_id_list.append(unique_id) @@ -832,9 +822,12 @@ def merge_func_preproc(working_directory): def register_func_longitudinal_template_to_standard( longitudinal_template_node, c, workflow, strat_init, strat_name ): - sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = ( - check_config_resources(c) - ) + ( + sub_mem_gb, + num_cores_per_sub, + num_ants_cores, + num_omp_cores, + ) = check_config_resources(c) strat_init_new = strat_init.fork() @@ -1211,6 +1204,7 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = config["desired_orientation"] strat_init.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 3b7451d8b8..be1d0c0c17 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2023 C-PAC Developers +# Copyright (C) 2021-2024 C-PAC Developers # This file is part of C-PAC. 
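A note on the pattern recurring through these hunks: both the longitudinal template resampling above and the template ingress in engine.py below thread the new ``pipeline_setup: desired_orientation`` setting into an AFNI resample/reorient node. A minimal sketch of that pattern, assuming nipype's ``afni.Resample`` interface (the workflow name, node name, and input path are illustrative placeholders, not C-PAC's actual values):

    from nipype.interfaces import afni

    from CPAC.pipeline import nipype_pipeline_engine as pe

    wf = pe.Workflow(name="reorient_demo")  # illustrative workflow
    reorient = pe.Node(interface=afni.Resample(), name="reorient_template")
    # in C-PAC this value comes from cfg.pipeline_setup["desired_orientation"]
    reorient.inputs.orientation = "RPI"
    reorient.inputs.outputtype = "NIFTI_GZ"
    reorient.inputs.in_file = "/path/to/template.nii.gz"  # placeholder
    wf.add_nodes([reorient])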
@@ -19,31 +19,32 @@ import hashlib from itertools import chain import json -import logging import os import re -from typing import Optional, Union +from typing import Optional import warnings -from nipype import config # pylint: disable=wrong-import-order -from nipype.interfaces.utility import Rename # pylint: disable=wrong-import-order +from nipype import config, logging +from nipype.interfaces import afni +from nipype.interfaces.utility import Rename from CPAC.image_utils.spatial_smoothing import spatial_smoothing from CPAC.image_utils.statistical_transforms import ( fisher_z_score_standardize, z_score_standardize, ) -from CPAC.pipeline import ( - nipype_pipeline_engine as pe, # pylint: disable=ungrouped-imports -) +from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs -from CPAC.pipeline.nodeblock import ( - NodeBlockFunction, # pylint: disable=ungrouped-imports +from CPAC.pipeline.nodeblock import NodeBlockFunction +from CPAC.pipeline.utils import ( + MOVEMENT_FILTER_KEYS, + name_fork, + source_set, ) -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename +from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import ( create_anat_datasource, create_func_datasource, @@ -53,9 +54,13 @@ ) from CPAC.utils.interfaces.datasink import DataSink from CPAC.utils.interfaces.function import Function -from CPAC.utils.monitoring import getLogger, LOGTAIL, WARNING_FREESURFER_OFF_WITH_DATA +from CPAC.utils.monitoring import ( + getLogger, + LOGTAIL, + WARNING_FREESURFER_OFF_WITH_DATA, + WFLOGGER, +) from CPAC.utils.outputs import Outputs -from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import ( check_prov_for_regtool, create_id_string, @@ -64,8 +69,6 @@ write_output_json, ) -logger = getLogger("nipype.workflow") - class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): @@ -149,7 +152,7 @@ def append_name(self, name): def back_propogate_template_name( self, wf, resource_idx: str, json_info: dict, id_string: "pe.Node" ) -> None: - """Find and apply the template name from a resource's provenance + """Find and apply the template name from a resource's provenance. 
Parameters ---------- @@ -179,9 +182,9 @@ def back_propogate_template_name( # a different space, so don't use it as the space for # descendents try: - anscestor_json = list(self.rpool.get(source).items())[0][1].get( - "json", {} - ) + anscestor_json = next(iter(self.rpool.get(source).items()))[ + 1 + ].get("json", {}) if "Description" in anscestor_json: id_string.inputs.template_desc = anscestor_json[ "Description" @@ -227,7 +230,7 @@ def copy_rpool(self): @staticmethod def get_raw_label(resource: str) -> str: - """Removes ``desc-*`` label""" + """Remove ``desc-*`` label.""" for tag in resource.split("_"): if "desc-" in tag: resource = resource.replace(f"{tag}_", "") @@ -244,7 +247,9 @@ def get_strat_info(self, prov, label=None, logdir=None): if label: if not logdir: logdir = self.logdir - print(f"\n\nPrinting out strategy info for {label} in {logdir}\n") + WFLOGGER.info( + "\n\nPrinting out strategy info for %s in %s\n", label, logdir + ) write_output_json( strat_info, f"{label}_strat_info", indent=4, basedir=logdir ) @@ -252,15 +257,15 @@ def get_strat_info(self, prov, label=None, logdir=None): def set_json_info(self, resource, pipe_idx, key, val): # TODO: actually should probably be able to inititialize resource/pipe_idx if pipe_idx not in self.rpool[resource]: - raise Exception( + msg = ( "\n[!] DEV: The pipeline/strat ID does not exist " f"in the resource pool.\nResource: {resource}" f"Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n" ) - else: - if "json" not in self.rpool[resource][pipe_idx]: - self.rpool[resource][pipe_idx]["json"] = {} - self.rpool[resource][pipe_idx]["json"][key] = val + raise Exception(msg) + if "json" not in self.rpool[resource][pipe_idx]: + self.rpool[resource][pipe_idx]["json"] = {} + self.rpool[resource][pipe_idx]["json"][key] = val def get_json_info(self, resource, pipe_idx, key): # TODO: key checks @@ -279,12 +284,14 @@ def get_resource_from_prov(prov): return None if isinstance(prov[-1], list): return prov[-1][-1].split(":")[0] - elif isinstance(prov[-1], str): + if isinstance(prov[-1], str): return prov[-1].split(":")[0] + return None def regressor_dct(self, cfg) -> dict: - """Returns the regressor dictionary for the current strategy if - one exists. Raises KeyError otherwise. + """Return the regressor dictionary for the current strategy if one exists. + + Raises KeyError otherwise. """ # pylint: disable=attribute-defined-outside-init if hasattr(self, "_regressor_dct"): # memoized @@ -296,7 +303,6 @@ def regressor_dct(self, cfg) -> dict: "ingress_regressors." ) _nr = cfg["nuisance_corrections", "2-nuisance_regression"] - if not hasattr(self, "timeseries"): if _nr["Regressors"]: self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} @@ -341,15 +347,18 @@ def set_data( try: res, new_pipe_idx = self.generate_prov_string(new_prov_list) except IndexError: - raise IndexError( + msg = ( f"\n\nThe set_data() call for {resource} has no " "provenance information and should not be an " "injection." 
) + raise IndexError(msg) if not json_info: json_info = { - "RawSources": [resource] - } # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + "RawSources": [ + resource # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + ] + } json_info["CpacProvenance"] = new_prov_list if resource not in self.rpool.keys(): @@ -357,9 +366,8 @@ def set_data( elif not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind search = False if self.get_resource_from_prov(current_prov_list) == resource: - pipe_idx = self.generate_prov_string(current_prov_list)[ - 1 - ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + pipe_idx = self.generate_prov_string(current_prov_list)[1] if pipe_idx not in self.rpool[resource].keys(): search = True else: @@ -368,22 +376,15 @@ def set_data( for idx in current_prov_list: if self.get_resource_from_prov(idx) == resource: if isinstance(idx, list): - pipe_idx = self.generate_prov_string( - idx - )[ - 1 - ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + pipe_idx = self.generate_prov_string(idx)[1] elif isinstance(idx, str): pipe_idx = idx break - if ( - pipe_idx in self.rpool[resource].keys() - ): # <--- in case the resource name is now new, and not the original - del self.rpool[ - resource - ][ - pipe_idx - ] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + if pipe_idx in self.rpool[resource].keys(): + # in case the resource name is now new, and not the original + # remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + del self.rpool[resource][pipe_idx] if new_pipe_idx not in self.rpool[resource]: self.rpool[resource][new_pipe_idx] = {} if new_pipe_idx not in self.pipe_list: @@ -394,18 +395,17 @@ def set_data( def get( self, - resource: LIST_OR_STR, + resource: list[str] | str, pipe_idx: Optional[str] = None, report_fetched: Optional[bool] = False, optional: Optional[bool] = False, - ) -> Union[TUPLE[Optional[dict], Optional[str]], Optional[dict]]: + ) -> tuple[Optional[dict], Optional[str]] | Optional[dict]: # NOTE!!! - # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} - # BUT, if this is a sub rpool (i.e. a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT + # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} + # BUT, if this is a sub rpool (i.e. a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT if not isinstance(resource, list): resource = [resource] - # if a list of potential inputs are given, pick the first one - # found + # if a list of potential inputs are given, pick the first one found for label in resource: if label in self.rpool.keys(): _found = self.rpool[label] @@ -418,7 +418,7 @@ def get( if report_fetched: return (None, None) return None - raise LookupError( + msg = ( "\n\n[!] 
C-PAC says: None of the listed resources are in " f"the resource pool:\n\n {resource}\n\nOptions:\n- You " "can enable a node block earlier in the pipeline which " @@ -432,6 +432,7 @@ def get( "through any of our support channels at: " "https://fcp-indi.github.io/\n" ) + raise LookupError(msg) def get_data( self, resource, pipe_idx=None, report_fetched=False, quick_single=False @@ -444,10 +445,10 @@ def get_data( return (connect["data"], fetched) connect, fetched = self.get(resource, report_fetched=report_fetched) return (connect["data"], fetched) - elif pipe_idx: + if pipe_idx: return self.get(resource, pipe_idx=pipe_idx)["data"] - elif quick_single or len(self.get(resource)) == 1: - for key, val in self.get(resource).items(): + if quick_single or len(self.get(resource)) == 1: + for _key, val in self.get(resource).items(): return val["data"] return self.get(resource)["data"] @@ -455,7 +456,8 @@ def copy_resource(self, resource, new_name): try: self.rpool[new_name] = self.rpool[resource] except KeyError: - raise Exception(f"[!] {resource} not in the resource pool.") + msg = f"[!] {resource} not in the resource pool." + raise Exception(msg) def update_resource(self, resource, new_name): # move over any new pipe_idx's @@ -478,11 +480,12 @@ def get_json(self, resource, strat=None): if "json" in resource_strat_dct: strat_json = resource_strat_dct["json"] else: - raise Exception( + msg = ( "\n[!] Developer info: the JSON " f"information for {resource} and {strat} " f"is incomplete.\n" ) + raise Exception(msg) return strat_json def get_cpac_provenance(self, resource, strat=None): @@ -501,12 +504,13 @@ def get_cpac_provenance(self, resource, strat=None): def generate_prov_string(prov): # this will generate a string from a SINGLE RESOURCE'S dictionary of # MULTIPLE PRECEDING RESOURCES (or single, if just one) - # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) + # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) if not isinstance(prov, list): - raise Exception( + msg = ( "\n[!] Developer info: the CpacProvenance " f"entry for {prov} has to be a list.\n" ) + raise TypeError(msg) last_entry = get_last_prov_entry(prov) resource = last_entry.split(":")[0] return (resource, str(prov)) @@ -514,10 +518,11 @@ def generate_prov_string(prov): @staticmethod def generate_prov_list(prov_str): if not isinstance(prov_str, str): - raise Exception( + msg = ( "\n[!] 
Developer info: the CpacProvenance " f"entry for {prov_str!s} has to be a string.\n" ) + raise TypeError(msg) return ast.literal_eval(prov_str) @staticmethod @@ -544,7 +549,7 @@ def get_resource_strats_from_prov(prov): def flatten_prov(self, prov): if isinstance(prov, str): return [prov] - elif isinstance(prov, list): + if isinstance(prov, list): flat_prov = [] for entry in prov: if isinstance(entry, list): @@ -552,6 +557,7 @@ def flatten_prov(self, prov): else: flat_prov.append(entry) return flat_prov + return None def get_strats(self, resources, debug=False): # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS @@ -562,7 +568,7 @@ def get_strats(self, resources, debug=False): linked_resources = [] resource_list = [] if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("\nresources: %s", resources) for resource in resources: # grab the linked-input tuples @@ -576,7 +582,7 @@ def get_strats(self, resources, debug=False): continue linked.append(fetched_resource) resource_list += linked - if len(linked) < 2: + if len(linked) < 2: # noqa: PLR2004 continue linked_resources.append(linked) else: @@ -586,15 +592,18 @@ def get_strats(self, resources, debug=False): variant_pool = {} len_inputs = len(resource_list) if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("linked_resources: %s", linked_resources) verbose_logger.debug("resource_list: %s", resource_list) for resource in resource_list: - rp_dct, fetched_resource = self.get( + ( + rp_dct, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath + fetched_resource, + ) = self.get( resource, - report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath - optional=True, - ) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + report_fetched=True, + optional=True, # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + ) if not rp_dct: len_inputs -= 1 continue @@ -614,7 +623,7 @@ def get_strats(self, resources, debug=False): variant_pool[fetched_resource].append(f"NO-{val[0]}") if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("%s sub_pool: %s\n", resource, sub_pool) total_pool.append(sub_pool) @@ -652,7 +661,7 @@ def get_strats(self, resources, debug=False): strat_list_list.append(strat_list) if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("len(strat_list_list): %s\n", len(strat_list_list)) for strat_list in strat_list_list: json_dct = {} @@ -742,10 +751,8 @@ def get_strats(self, resources, debug=False): # make the merged strat label from the multiple inputs # strat_list is actually the merged CpacProvenance lists pipe_idx = str(strat_list) - new_strats[pipe_idx] = ( - ResourcePool() - ) # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! - + new_strats[pipe_idx] = ResourcePool() + # new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! 
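+            # An illustrative note on the convention used here (the labels are
+            # made-up examples, not fixed C-PAC names): given
+            #     prov = ["T1w:anat_ingress", "desc-preproc_T1w:anatomical_init"]
+            # generate_prov_string(prov) returns
+            #     ("desc-preproc_T1w",
+            #      "['T1w:anat_ingress', 'desc-preproc_T1w:anatomical_init']")
+            # i.e. a pipe_idx is just the stringified provenance list, and the
+            # merged pipe_idx above is the stringified list of those lists.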
# placing JSON info at one level higher only for copy convenience new_strats[pipe_idx].rpool["json"] = {} new_strats[pipe_idx].rpool["json"]["subjson"] = {} @@ -754,12 +761,10 @@ def get_strats(self, resources, debug=False): # now just invert resource:strat to strat:resource for each resource:strat for cpac_prov in strat_list: resource, strat = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][ - strat - ] # <----- remember, this is the dct of 'data' and 'json'. - new_strats[pipe_idx].rpool[resource] = ( - resource_strat_dct # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. - ) + resource_strat_dct = self.rpool[resource][strat] + # remember, `resource_strat_dct` is the dct of 'data' and 'json'. + new_strats[pipe_idx].rpool[resource] = resource_strat_dct + # `new_strats` is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. self.pipe_list.append(pipe_idx) if "CpacVariant" in resource_strat_dct["json"]: if "CpacVariant" not in new_strats[pipe_idx].rpool["json"]: @@ -783,21 +788,18 @@ def get_strats(self, resources, debug=False): ) else: new_strats = {} - for resource_strat_list in ( - total_pool - ): # total_pool will have only one list of strats, for the one input + for resource_strat_list in total_pool: + # total_pool will have only one list of strats, for the one input for cpac_prov in resource_strat_list: # <------- cpac_prov here doesn't need to be modified, because it's not merging with other inputs resource, pipe_idx = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][ - pipe_idx - ] # <----- remember, this is the dct of 'data' and 'json'. + resource_strat_dct = self.rpool[resource][pipe_idx] + # remember, `resource_strat_dct` is the dct of 'data' and 'json'. new_strats[pipe_idx] = ResourcePool( rpool={resource: resource_strat_dct} ) # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! # placing JSON info at one level higher only for copy convenience - new_strats[pipe_idx].rpool["json"] = resource_strat_dct[ - "json" - ] # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) + new_strats[pipe_idx].rpool["json"] = resource_strat_dct["json"] + # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) new_strats[pipe_idx].rpool["json"]["subjson"] = {} new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = cpac_prov # preserve each input's JSON info also @@ -813,8 +815,7 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): if label in self.xfm: json_info = dict(json_info) - # get the bold-to-template transform from the current strat_pool - # info + # get the bold-to-template transform from the current strat_pool info xfm_idx = None xfm_label = "from-bold_to-template_mode-image_xfm" for entry in json_info["CpacProvenance"]: @@ -880,7 +881,7 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): @property def filtered_movement(self) -> bool: """ - Check if the movement parameters have been filtered in this strat_pool + Check if the movement parameters have been filtered in this strat_pool. 
Returns ------- @@ -894,14 +895,11 @@ def filtered_movement(self) -> bool: # not a strat_pool or no movement parameters in strat_pool return False - def filter_name(self, cfg) -> str: + def filter_name(self, cfg: Configuration) -> str: """ - In a strat_pool with filtered movement parameters, return the - name of the filter for this strategy + Return the name of the filter for this strategy. - Returns - ------- - str + In a strat_pool with filtered movement parameters. """ motion_filters = cfg[ "functional_preproc", @@ -958,7 +956,7 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) if label in Outputs.to_smooth: for smooth_opt in self.smooth_opts: sm = spatial_smoothing( - f"{label}_smooth_{smooth_opt}_" f"{pipe_x}", + f"{label}_smooth_{smooth_opt}_{pipe_x}", self.fwhm, input_type, smooth_opt, @@ -1052,8 +1050,7 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) connection[0], connection[1], zstd, "inputspec.correlation_file" ) - # if the output is 'space-template_desc-MeanSCA_correlations', we want - # 'desc-MeanSCA_timeseries' + # if the output is 'space-template_desc-MeanSCA_correlations', we want 'desc-MeanSCA_timeseries' oned = label.replace("correlations", "timeseries") node, out = outs[oned] @@ -1187,12 +1184,12 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): self.rpool[resource][pipe_idx]["json"] for pipe_idx in self.rpool[resource] ] - unlabelled = set( + unlabelled = { key for json_info in all_jsons for key in json_info.get("CpacVariant", {}).keys() if key not in (*MOVEMENT_FILTER_KEYS, "timeseries") - ) + } if "bold" in unlabelled: all_bolds = list( chain.from_iterable( @@ -1205,7 +1202,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # not any(not) because all is overloaded as a parameter here if not any( not re.match( - r"apply_(phasediff|blip)_to_" r"timeseries_separately_.*", _bold + r"apply_(phasediff|blip)_to_timeseries_separately_.*", _bold ) for _bold in all_bolds ): @@ -1225,7 +1222,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): } # del all_jsons for key, forks in all_forks.items(): - if len(forks) < 2: # no int suffix needed if only one fork + if len(forks) < 2: # noqa: PLR2004 + # no int suffix needed if only one fork unlabelled.remove(key) # del all_forks for pipe_idx in self.rpool[resource]: @@ -1322,7 +1320,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # need the single quote and the colon inside the double # quotes - it's the encoded pipe_idx # atlas_idx = new_idx.replace(f"'{temp_rsc}:", - # "'atlas_name:") + # "'atlas_name:") if atlas_idx in self.rpool["atlas_name"]: node, out = self.rpool["atlas_name"][atlas_idx]["data"] wf.connect(node, out, id_string, "atlas_id") @@ -1340,7 +1338,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): ) ) ) - nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_" f"{pipe_x}") + nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_{pipe_x}") nii_name.inputs.keep_ext = True if resource in Outputs.ciftis: @@ -1362,7 +1360,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): try: wf.connect(node, out, nii_name, "in_file") except OSError as os_error: - logger.warning(os_error) + WFLOGGER.warning(os_error) continue write_json_imports = ["import os", "import json"] @@ -1378,7 +1376,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): write_json.inputs.json_data = 
json_info wf.connect(id_string, "out_filename", write_json, "filename") - ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_" f"{pipe_x}") + ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_{pipe_x}") ds.inputs.parameterization = False ds.inputs.base_directory = out_dct["out_dir"] ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup["Amazon-AWS"][ @@ -1406,7 +1404,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): outputs_logger.info(expected_outputs) def node_data(self, resource, **kwargs): - """Factory function to create NodeData objects + """Create NodeData objects. Parameters ---------- @@ -1435,13 +1433,14 @@ def __init__(self, node_block_functions, debug=False): self.input_interface = [self.input_interface] if not isinstance(node_block_function, NodeBlockFunction): - # If the object is a plain function `__name__` will be more useful then `str()` + # If the object is a plain function `__name__` will be more useful than `str()` obj_str = ( node_block_function.__name__ if hasattr(node_block_function, "__name__") else str(node_block_function) ) - raise TypeError(f'Object is not a nodeblock: "{obj_str}"') + msg = f'Object is not a nodeblock: "{obj_str}"' + raise TypeError(msg) name = node_block_function.name self.name = name @@ -1475,11 +1474,11 @@ def __init__(self, node_block_functions, debug=False): if node_block_function.outputs is not None: self.options = node_block_function.outputs - logger.info("Connecting %s...", name) + WFLOGGER.info("Connecting %s...", name) if debug: config.update_config({"logging": {"workflow_level": "DEBUG"}}) logging.update_logging(config) - logger.debug( + WFLOGGER.debug( '"inputs": %s\n\t "outputs": %s%s', node_block_function.inputs, list(self.outputs.keys()), @@ -1500,21 +1499,21 @@ def check_null(self, val): def check_output(self, outputs, label, name): if label not in outputs: - raise NameError( + msg = ( f'\n[!] Output name "{label}" in the block ' "function does not match the outputs list " f'{outputs} in Node Block "{name}"\n' ) + raise NameError(msg) def grab_tiered_dct(self, cfg, key_list): cfg_dct = cfg.dict() for key in key_list: try: cfg_dct = cfg_dct.get(key, {}) - except KeyError: - raise Exception( - "[!] The config provided to the node block is not valid" - ) + except KeyError as ke: + msg = "[!] The config provided to the node block is not valid" + raise KeyError(msg) from ke return cfg_dct def connect_block(self, wf, cfg, rpool): @@ -1540,12 +1539,12 @@ def connect_block(self, wf, cfg, rpool): else: for option in option_val: try: - if ( - option in self.grab_tiered_dct(cfg, key_list) - ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if option in self.grab_tiered_dct(cfg, key_list): + # goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) except AttributeError as err: - raise Exception(f"{err}\nNode Block: {name}") + msg = f"{err}\nNode Block: {name}" + raise Exception(msg) if opts is None: opts = [opts] @@ -1553,7 +1552,7 @@ def connect_block(self, wf, cfg, rpool): elif option_key and not option_val: # enables multiple config forking entries if not isinstance(option_key[0], list): - raise Exception( + msg = ( f"[!] 
The option_key field ({option_key}) " f"for {name} exists but there is no " "option_val.\n\nIf you are trying to " @@ -1561,6 +1560,7 @@ def connect_block(self, wf, cfg, rpool): "option_val field must contain a list of " "a list.\n" ) + raise ValueError(msg) for option_config in option_key: # option_config is a list of pipe config levels down to the option if config: @@ -1570,7 +1570,7 @@ def connect_block(self, wf, cfg, rpool): option_val = option_config[-1] if option_val in self.grab_tiered_dct(cfg, key_list[:-1]): opts.append(option_val) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! opts = [None] all_opts += opts @@ -1586,10 +1586,8 @@ def connect_block(self, wf, cfg, rpool): "output_directory" ]["user_defined"] - for ( - name, - block_dct, - ) in self.node_blocks.items(): # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. + for name, block_dct in self.node_blocks.items(): + # iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. switch = self.check_null(block_dct["switch"]) config = self.check_null(block_dct["config"]) option_key = self.check_null(block_dct["option_key"]) @@ -1614,14 +1612,12 @@ def connect_block(self, wf, cfg, rpool): opts = self.grab_tiered_dct(cfg, key_list) else: for option in option_val: - if ( - option in self.grab_tiered_dct(cfg, key_list) - ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if option in self.grab_tiered_dct(cfg, key_list): + # goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! - opts = [ - None - ] # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + opts = [None] + # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples if not opts: # for node blocks where the options are split into different # block functions - opts will be empty for non-selected @@ -1634,14 +1630,14 @@ def connect_block(self, wf, cfg, rpool): if config: try: key_list = config + switch - except TypeError: - raise Exception( + except TypeError as te: + msg = ( "\n\n[!] 
Developer info: Docstring error " f"for {name}, make sure the 'config' or " "'switch' fields are lists.\n\n" ) + raise TypeError(msg) from te switch = self.grab_tiered_dct(cfg, key_list) - elif isinstance(switch[0], list): # we have multiple switches, which is designed to only work if # config is set to "None" @@ -1667,17 +1663,17 @@ def connect_block(self, wf, cfg, rpool): if not isinstance(switch, list): switch = [switch] if True in switch: - for pipe_idx, strat_pool in rpool.get_strats( - inputs, debug - ).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} - fork = ( - False in switch - ) # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } - for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. + for ( + pipe_idx, + strat_pool, # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} + ) in rpool.get_strats(inputs, debug).items(): + # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } + fork = False in switch + for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. # remember, you can get 'data' or 'json' from strat_pool with member functions # strat_pool has all of the JSON information of all the inputs! # so when we set_data below for the TOP-LEVEL MAIN RPOOL (not the strat_pool), we can generate new merged JSON information for each output. - # particularly, our custom 'CpacProvenance' field. + # particularly, our custom 'CpacProvenance' field. node_name = name pipe_x = rpool.get_pipe_number(pipe_idx) @@ -1694,12 +1690,12 @@ def connect_block(self, wf, cfg, rpool): try: wf, outs = block_function(wf, cfg, strat_pool, pipe_x, opt) except IOError as e: # duplicate node - logger.warning(e) + WFLOGGER.warning(e) continue if not outs: - if block_function.__name__ == "freesurfer_" "postproc": - logger.warning(WARNING_FREESURFER_OFF_WITH_DATA) + if block_function.__name__ == "freesurfer_postproc": + WFLOGGER.warning(WARNING_FREESURFER_OFF_WITH_DATA) LOGTAIL["warnings"].append( WARNING_FREESURFER_OFF_WITH_DATA ) @@ -1711,7 +1707,7 @@ def connect_block(self, wf, cfg, rpool): node_name = f'{node_name}_{opt["Name"]}' if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("\n=======================") verbose_logger.debug("Node name: %s", node_name) prov_dct = rpool.get_resource_strats_from_prov( @@ -1736,7 +1732,7 @@ def connect_block(self, wf, cfg, rpool): new_json_info = copy.deepcopy(strat_pool.get("json")) # transfer over data-specific json info - # for example, if the input data json is _bold and the output is also _bold + # for example, if the input data json is _bold and the output is also _bold data_type = label.split("_")[-1] if data_type in new_json_info["subjson"]: if ( @@ -1845,8 +1841,7 @@ def connect_block(self, wf, cfg, rpool): def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): - """Wrap a list of node block functions to make them easier to use within - other node blocks. + """Wrap a list of node block functions to use within other node blocks. 
Example usage: @@ -1914,7 +1909,7 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): if "anat" not in data_paths: - print("No anatomical data present.") + WFLOGGER.warning("No anatomical data present.") return rpool if "creds_path" not in data_paths: @@ -1923,7 +1918,7 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id anat_flow = create_anat_datasource(f"anat_T1w_gather_{part_id}_{ses_id}") anat = {} - if type(data_paths["anat"]) is str: + if isinstance(data_paths["anat"], str): anat["T1"] = data_paths["anat"] elif "T1w" in data_paths["anat"]: anat["T1"] = data_paths["anat"]["T1w"] @@ -1961,7 +1956,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): try: fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], part_id) except KeyError: - print("No FreeSurfer data present.") + WFLOGGER.warning("No FreeSurfer data present.") return rpool # fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) @@ -1980,7 +1975,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): subj_ses = part_id + "-" + ses_id fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], subj_ses) if not os.path.exists(fs_path): - print(f"No FreeSurfer data found for subject {part_id}") + WFLOGGER.info("No FreeSurfer data found for subject %s", part_id) return rpool # Check for double nested subj names @@ -2043,7 +2038,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): ) else: warnings.warn( - str(LookupError("\n[!] Path does not exist for " f"{fullpath}.\n")) + str(LookupError(f"\n[!] Path does not exist for {fullpath}.\n")) ) return rpool @@ -2088,7 +2083,7 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id # pylint: disable=protected-access wf._local_func_scans = local_func_scans if cfg.pipeline_setup["Debugging"]["verbose"]: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("local_func_scans: %s", local_func_scans) del local_func_scans @@ -2100,7 +2095,7 @@ def ingress_output_dir( ): dir_path = data_paths["derivatives_dir"] - print(f"\nPulling outputs from {dir_path}.\n") + WFLOGGER.info("\nPulling outputs from %s.\n", dir_path) anat = os.path.join(dir_path, "anat") func = os.path.join(dir_path, "func") @@ -2143,11 +2138,12 @@ def ingress_output_dir( data_label = filename.split(unique_id)[1].lstrip("_") if len(filename) == len(data_label): - raise Exception( + msg = ( "\n\n[!] Possibly wrong participant or " "session in this directory?\n\n" f"Filepath: {filepath}\n\n" ) + raise Exception(msg) bidstag = "" for tag in data_label.split("_"): @@ -2253,8 +2249,8 @@ def json_outdir_ingress(rpool, filepath, exts, data_label, json): jsonpath = f"{jsonpath}.json" if not os.path.exists(jsonpath): - print( - f"\n\n[!] No JSON found for file {filepath}.\nCreating " f"{jsonpath}..\n\n" + WFLOGGER.info( + "\n\n[!] No JSON found for file %s.\nCreating %s..\n\n", filepath, jsonpath ) json_info = { "Description": "This data was generated elsewhere and " @@ -2283,13 +2279,14 @@ def json_outdir_ingress(rpool, filepath, exts, data_label, json): if only_desc[-1] == "-": only_desc = only_desc.rstrip("-") else: - raise Exception( + msg = ( "\n[!] 
Something went wrong with either " "reading in the output directory or when " "it was written out previously.\n\nGive " "this to your friendly local C-PAC " f"developer:\n\n{data_label!s}\n" ) + raise IOError(msg) # remove the integer at the end of the desc-* variant, we will # get the unique pipe_idx from the CpacProvenance below @@ -2319,7 +2316,6 @@ def func_outdir_ingress( wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, func_paths ): pipe_x = len(rpool.pipe_list) - exts = [".nii", ".gz", ".mat", ".1D", ".txt", ".csv", ".rms", ".tsv"] ingress = create_func_datasource( func_dict, rpool, f"gather_func_outdir_{key}_{pipe_x}" ) @@ -2362,7 +2358,7 @@ def func_outdir_ingress( wf.connect(ingress, "outputspec.scan", iterables, "scan") for key in func_paths: - if key == mask_paths_key or key == ts_paths_key: + if key in (mask_paths_key, ts_paths_key): ingress_func = create_general_datasource(f"ingress_func_data_{key}") ingress_func.inputs.inputnode.set( unique_id=unique_id, @@ -2412,7 +2408,7 @@ def strip_template(data_label, dir_path, filename): return data_label, json -def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): +def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well @@ -2421,6 +2417,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) + desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): key = row.Key @@ -2477,7 +2474,13 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): resampled_template = pe.Node( Function( - input_names=["resolution", "template", "template_name", "tag"], + input_names=[ + "orientation", + "resolution", + "template", + "template_name", + "tag", + ], output_names=["resampled_template"], function=resolve_resolution, as_module=True, @@ -2485,24 +2488,15 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): name="resampled_" + key, ) + resampled_template.inputs.orientation = desired_orientation resampled_template.inputs.resolution = resolution resampled_template.inputs.template = val resampled_template.inputs.template_name = key resampled_template.inputs.tag = tag - # the set_data below is set up a little differently, because we are - # injecting and also over-writing already-existing entries - # other alternative would have been to ingress into the - # resampled_template node from the already existing entries, but we - # didn't do that here - rpool.set_data( - key, - resampled_template, - "resampled_template", - json_info, - "", - "template_resample", - ) # , inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! 
because you're not connecting this the regular way, you have to do it manually
+            node = resampled_template
+            output = "resampled_template"
+            node_name = "template_resample"
 
         elif val:
             config_ingress = create_general_datasource(f"gather_{key}")
@@ -2512,14 +2506,33 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
                 creds_path=creds_path,
                 dl_dir=cfg.pipeline_setup["working_directory"]["path"],
             )
-            rpool.set_data(
-                key,
-                config_ingress,
-                "outputspec.data",
-                json_info,
-                "",
-                f"{key}_config_ingress",
-            )
+            node = config_ingress
+            output = "outputspec.data"
+            node_name = f"{key}_config_ingress"
+
+        # tuple argument so both plain and gzipped NIfTI paths match
+        if val.endswith((".nii", ".nii.gz")):
+            check_reorient = pe.Node(
+                interface=afni.Resample(),
+                name=f"reorient_{key}",
+            )
+
+            check_reorient.inputs.orientation = desired_orientation
+            check_reorient.inputs.outputtype = "NIFTI_GZ"
+
+            wf.connect(node, output, check_reorient, "in_file")
+            node = check_reorient
+            output = "out_file"
+            node_name = f"{key}_reorient"
+
+        rpool.set_data(
+            key,
+            node,
+            output,
+            json_info,
+            "",
+            node_name,
+        )
+
     # templates, resampling from config
     """
    template_keys = [
@@ -2605,12 +2618,12 @@ def _set_nested(attr, keys):
         )
         cfg.set_nested(cfg, key, node)
     """
-
-    return rpool
+    return wf, rpool
 
 
 def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
     """
+    Initialize a new ResourcePool.
 
     data_paths format:
       {'anat': {
@@ -2676,7 +2689,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
     )
 
     # grab any file paths from the pipeline config YAML
-    rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path)
+    wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path)
 
     # output files with 4 different scans
 
@@ -2709,11 +2722,11 @@ def run_node_blocks(blocks, data_paths, cfg=None):
 
     run_blocks = []
     if rpool.check_rpool("desc-preproc_T1w"):
-        print("Preprocessed T1w found, skipping anatomical preprocessing.")
+        WFLOGGER.info("Preprocessed T1w found, skipping anatomical preprocessing.")
     else:
         run_blocks += blocks[0]
     if rpool.check_rpool("desc-preproc_bold"):
-        print("Preprocessed BOLD found, skipping functional preprocessing.")
+        WFLOGGER.info("Preprocessed BOLD found, skipping functional preprocessing.")
     else:
         run_blocks += blocks[1]
 
@@ -2727,16 +2740,15 @@ def run_node_blocks(blocks, data_paths, cfg=None):
 
 
 class NodeData:
-    r"""Class to hold outputs of
-    CPAC.pipeline.engine.ResourcePool().get_data(), so one can do
+    r"""Attribute access for ResourcePool.get_data outputs.
 
-    ``node_data = strat_pool.node_data(resource)`` and have
-    ``node_data.node`` and ``node_data.out`` instead of doing
-    ``node, out = strat_pool.get_data(resource)`` and needing two
-    variables (``node`` and ``out``) to store that information.
+    Class to hold outputs of CPAC.pipeline.engine.ResourcePool().get_data(), so one can
+    do ``node_data = strat_pool.node_data(resource)`` and have ``node_data.node`` and
+    ``node_data.out`` instead of doing ``node, out = strat_pool.get_data(resource)``
+    and needing two variables (``node`` and ``out``) to store that information.
 
-    Also includes ``variant`` attribute providing the resource's self-
-    keyed value within its ``CpacVariant`` dictionary.
+    Also includes ``variant`` attribute providing the resource's self-keyed value
+    within its ``CpacVariant`` dictionary.
Examples -------- @@ -2766,5 +2778,5 @@ def __init__(self, strat_pool=None, resource=None, **kwargs): if strat_pool is not None and resource is not None: self.node, self.out = strat_pool.get_data(resource, **kwargs) - def __repr__(self): + def __repr__(self): # noqa: D105 return f'{getattr(self.node, "name", str(self.node))} ({self.out})' diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index eb83a9107f..fa36a0dd2e 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Validation schema for C-PAC pipeline configurations""" +"""Validation schema for C-PAC pipeline configurations.""" # pylint: disable=too-many-lines from itertools import chain, permutations @@ -58,13 +58,13 @@ # ('x', # 1 or more digits, optional decimal, 0 or more lowercase characters (units) # ) 0 or more times -RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*" r"(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" +RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" Number = Any(float, int, All(str, Match(SCIENTIFIC_NOTATION_STR_REGEX))) def str_to_bool1_1(x): # pylint: disable=invalid-name - """Convert strings to Booleans for YAML1.1 syntax + """Convert strings to Booleans for YAML1.1 syntax. Ref https://yaml.org/type/bool.html @@ -91,11 +91,12 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name else x ) if not isinstance(x, (bool, int)): - raise BooleanInvalid( + msg = ( 'Type boolean value was expected, type ' f'{getattr(type(x), "__name__", str(type(x)))} ' f'value\n\n{x}\n\nwas provided' ) + raise BooleanInvalid(msg) return bool(x) @@ -316,7 +317,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name def name_motion_filter(mfilter, mfilters=None): - """Given a motion filter, create a short string for the filename + """Given a motion filter, create a short string for the filename. Parameters ---------- @@ -385,8 +386,7 @@ def name_motion_filter(mfilter, mfilters=None): def permutation_message(key, options): - """Function to give a clean, human-readable error message for keys - that accept permutation values + """Give a human-readable error message for keys that accept permutation values. Parameters ---------- @@ -413,7 +413,7 @@ def permutation_message(key, options): def sanitize(filename): - """Sanitize a filename and replace whitespaces with underscores""" + """Sanitize a filename and replace whitespaces with underscores.""" return re.sub(r"\s+", "_", sanitize_filename(filename)) @@ -423,6 +423,9 @@ def sanitize(filename): "skip env check": Maybe(bool), # flag for skipping an environment check "pipeline_setup": { "pipeline_name": All(str, Length(min=1), sanitize), + "desired_orientation": In( + {"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"} + ), "output_directory": { "path": str, "source_outputs_dir": Maybe(str), @@ -1254,10 +1257,11 @@ def sanitize(filename): def schema(config_dict): - """Validate a pipeline configuration against the latest validation schema - by first applying backwards-compatibility patches, then applying - Voluptuous validation, then handling complex configuration interaction - checks before returning validated config_dict. + """Validate a participant-analysis pipeline configuration. 
+ + Validate against the latest validation schema by first applying backwards- + compatibility patches, then applying Voluptuous validation, then handling complex + configuration interaction checks before returning validated config_dict. Parameters ---------- @@ -1277,9 +1281,12 @@ def schema(config_dict): "2-nuisance_regression", "space", ] and isinstance(multiple_invalid.errors[0], CoerceInvalid): - raise CoerceInvalid( + msg = ( 'Nusiance regression space is not forkable. Please choose ' - f'only one of {valid_options["space"]}', + f'only one of {valid_options["space"]}' + ) + raise CoerceInvalid( + msg, path=multiple_invalid.path, ) from multiple_invalid raise multiple_invalid @@ -1306,24 +1313,26 @@ def schema(config_dict): ]["space"] != "template" ): - raise ExclusiveInvalid( + msg = ( "``single_step_resampling_from_stc`` requires " "template-space nuisance regression. Either set " "``nuisance_corrections: 2-nuisance_regression: space`` " f"to ``template`` {or_else}" ) + raise ExclusiveInvalid(msg) if any( registration != "ANTS" for registration in partially_validated["registration_workflows"][ "anatomical_registration" ]["registration"]["using"] ): - raise ExclusiveInvalid( + msg = ( "``single_step_resampling_from_stc`` requires " "ANTS registration. Either set " "``registration_workflows: anatomical_registration: " f"registration: using`` to ``ANTS`` {or_else}" ) + raise ExclusiveInvalid(msg) except KeyError: pass try: @@ -1351,12 +1360,15 @@ def schema(config_dict): Length(min=1, max=1)(mec["motion_correction"]["using"]) except LengthInvalid: mec_path = ["functional_preproc", "motion_estimates_and_correction"] - raise LengthInvalid( # pylint: disable=raise-missing-from + msg = ( f'If data[{"][".join(map(repr, mec_path))}][\'run\'] is ' # length must be between 1 and # len(valid_options['motion_correction']) once #1935 is # resolved - 'True, length of list must be exactly 1', + 'True, length of list must be exactly 1' + ) + raise LengthInvalid( # pylint: disable=raise-missing-from + msg, path=[*mec_path, "motion_correction", "using"], ) except KeyError: @@ -1371,10 +1383,11 @@ def schema(config_dict): "create_regressors" ] ): - raise ExclusiveInvalid( + msg = ( "[!] Ingress_regressors and create_regressors can't both run! " " Try turning one option off.\n " ) + raise ExclusiveInvalid(msg) except KeyError: pass try: @@ -1396,12 +1409,13 @@ def schema(config_dict): ) as error: import site - raise OSError( + msg = ( "U-Net brain extraction requires torch to be installed, " "but the installation path in this container is " "read-only. Please bind a local writable path to " f'"{site.USER_BASE}" in the container to use U-Net.' 
- ) from error + ) + raise OSError(msg) from error except KeyError: pass return partially_validated diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index c228fc3640..cf85f50dbe 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -90,7 +90,7 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): rpool = ResourcePool(name=unique_id, cfg=cfg) - rpool = ingress_pipeconfig_paths(cfg, rpool, sub_data_dct, unique_id) + wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, sub_data_dct, unique_id) rpool.gather_pipes(wf, cfg, all=True) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 7410b335f2..33af078797 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2024 C-PAC Developers # This file is part of C-PAC. @@ -15,8 +15,11 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order +"""Workflows for registration.""" + from typing import Optional +from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils @@ -36,8 +39,8 @@ seperate_warps_list, single_ants_xfm_to_list, ) +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge -from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool @@ -49,12 +52,13 @@ def apply_transform( num_cpus=1, num_ants_cores=1, ): + """Apply transform.""" if not reg_tool: - raise Exception( - "\n[!] Developer info: the 'reg_tool' parameter sent " - f"to the 'apply_transform' node for '{wf_name}' is " - f"empty.\n" + msg = ( + "\n[!] Developer info: the 'reg_tool' parameter sent to the" + f" 'apply_transform' node for '{wf_name}' is empty.\n" ) + raise RequiredFieldInvalid(msg) wf = pe.Workflow(name=wf_name) @@ -101,7 +105,7 @@ def apply_transform( wf.connect(inputNode, "reference", apply_warp, "reference_image") interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -115,7 +119,7 @@ def apply_transform( wf.connect(interp_string, "interpolation", apply_warp, "interpolation") ants_xfm_list = pe.Node( - util.Function( + Function( input_names=["transform"], output_names=["transform_list"], function=single_ants_xfm_to_list, @@ -130,9 +134,9 @@ def apply_transform( # parallelize the apply warp, if multiple CPUs, and it's a time # series! 
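+        # A minimal sketch of the chunking idea (the actual behavior of
+        # C-PAC's ``chunk_ts`` is assumed here, not shown in this hunk):
+        # split the n_vols timepoints into n_chunks contiguous TR ranges,
+        #     n_vols = nib.load(func_file).shape[3]
+        #     bounds = [round(i * n_vols / n_chunks) for i in range(n_chunks + 1)]
+        #     TR_ranges = [(bounds[i], bounds[i + 1] - 1) for i in range(n_chunks)]
+        # so each chunk can be split off, warped in parallel, and re-merged.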
if int(num_cpus) > 1 and time_series: - chunk_imports = ["import nibabel as nb"] + chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -151,7 +155,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -193,7 +197,7 @@ def apply_transform( ) interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -217,9 +221,9 @@ def apply_transform( # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: - chunk_imports = ["import nibabel as nb"] + chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -238,7 +242,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -324,7 +328,7 @@ def transform_derivative( def convert_pedir(pedir, convert="xyz_to_int"): - """FSL Flirt requires pedir input encoded as an int""" + """FSL Flirt requires pedir input encoded as an int.""" if convert == "xyz_to_int": conv_dct = { "x": 1, @@ -352,18 +356,16 @@ def convert_pedir(pedir, convert="xyz_to_int"): if isinstance(pedir, bytes): pedir = pedir.decode() if not isinstance(pedir, str): - raise Exception( - "\n\nPhase-encoding direction must be a " - f"string value.\n\nValue: {pedir}" - "\n\n" - ) + msg = f"\n\nPhase-encoding direction must be a string value.\n\nValue: {pedir}\n\n" + raise ValueError(msg) if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " f"entered: {pedir}\n\n") - pedir = conv_dct[pedir] - return pedir + msg = f"\n\nInvalid phase-encoding direction entered: {pedir}\n\n" + raise ValueError(msg) + return conv_dct[pedir] def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): + """Create a FLIRT workflow.""" linear_register = pe.Workflow(name=name) inputspec = pe.Node( @@ -404,9 +406,7 @@ def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): - """ - Performs non-linear registration of an input file to a reference file - using FSL FNIRT. + """Perform non-linear registration of an input to a reference using FSL FNIRT. Parameters ---------- @@ -515,9 +515,7 @@ def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): - """ - Performs non-linear registration of an input file to a reference file - using FSL FNIRT. + """Perform non-linear registration of an input to a reference using FSL FNIRT. Parameters ---------- @@ -680,9 +678,9 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): def create_register_func_to_anat( config, phase_diff_distcor=False, name="register_func_to_anat" ): - """ - Registers a functional scan in native space to anatomical space using a - linear transform and does not include bbregister. + """Register a functional scan in native space to anatomical space... 
+ + ...using a linear transform and does not include bbregister. Parameters ---------- @@ -764,7 +762,7 @@ def create_register_func_to_anat( if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -803,12 +801,9 @@ def create_register_func_to_anat( def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_T2"): - # for monkey data - # ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 - # https://github.com/HechengJin0/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 - """ - Registers a functional scan in native space to anatomical space using a - linear transform and does not include bbregister, use T1 and T2 image. + """Register a functional scan in native space to anatomical space... + + ...using a linear transform and does not include bbregister, use T1 and T2 image. Parameters ---------- @@ -823,6 +818,10 @@ def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_ Notes ----- + for monkey data + ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 + https://github.com/HechengJin0/dcan-macaque-pipeline/blob/9f69302/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 + Workflow Inputs:: inputspec.func : string (nifti file) @@ -961,9 +960,9 @@ def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_ def create_bbregister_func_to_anat( phase_diff_distcor=False, name="bbregister_func_to_anat" ): - """ - Registers a functional scan in native space to structural. This is - meant to be used after create_nonlinear_register() has been run and + """Register a functional scan in native space to structural. + + This is meant to be used after create_nonlinear_register() has been run and relies on some of its outputs. Parameters @@ -1069,7 +1068,7 @@ def bbreg_args(bbreg_target): if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1108,9 +1107,9 @@ def bbreg_args(bbreg_target): def create_wf_calculate_ants_warp( name="create_wf_calculate_ants_warp", num_threads=1, reg_ants_skull=1 ): - """ - Calculates the nonlinear ANTS registration transform. This workflow - employs the antsRegistration tool: + """Calculate the nonlinear ANTS registration transform. 
+ + This workflow employs the antsRegistration tool: http://stnava.github.io/ANTs/ @@ -1278,7 +1277,7 @@ def create_wf_calculate_ants_warp( """ reg_imports = ["import os", "import subprocess"] calculate_ants_warp = pe.Node( - interface=util.Function( + interface=Function( input_names=[ "moving_brain", "reference_brain", @@ -1304,7 +1303,7 @@ def create_wf_calculate_ants_warp( calculate_ants_warp.interface.num_threads = num_threads select_forward_initial = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1315,7 +1314,7 @@ def create_wf_calculate_ants_warp( select_forward_initial.inputs.selection = "Initial" select_forward_rigid = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1326,7 +1325,7 @@ def create_wf_calculate_ants_warp( select_forward_rigid.inputs.selection = "Rigid" select_forward_affine = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1337,7 +1336,7 @@ def create_wf_calculate_ants_warp( select_forward_affine.inputs.selection = "Affine" select_forward_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1348,7 +1347,7 @@ def create_wf_calculate_ants_warp( select_forward_warp.inputs.selection = "Warp" select_inverse_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1456,6 +1455,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector( wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w" ): + """Transform raw data to template with FSL.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -1485,7 +1485,7 @@ def FSL_registration_connector( if template == "EPI": tmpl = "EPI" - if opt == "FSL" or opt == "FSL-linear": + if opt in ("FSL", "FSL-linear"): flirt_reg_anat_mni = create_fsl_flirt_linear_reg( f"anat_mni_flirt_register{symm}" ) @@ -1518,7 +1518,7 @@ def FSL_registration_connector( ) write_invlin_composite_xfm = pe.Node( - interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_" f"nii{symm}" + interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_nii{symm}" ) wf.connect( @@ -1625,6 +1625,7 @@ def FSL_registration_connector( def ANTs_registration_connector( wf_name, cfg, params, orig="T1w", symmetric=False, template="T1w" ): + """Transform raw data to template with ANTs.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -1655,12 +1656,11 @@ def ANTs_registration_connector( if params is None: err_msg = ( - "\n\n[!] C-PAC says: \nYou have selected ANTs as your " - "anatomical registration method.\n" - "However, no ANTs parameters were specified.\n" - "Please specify ANTs parameters properly and try again." + "\n\n[!] C-PAC says: \nYou have selected ANTs as your" + " anatomical registration method.\nHowever, no ANTs parameters were" + " specified.\nPlease specify ANTs parameters properly and try again." 
) - raise Exception(err_msg) + raise RequiredFieldInvalid(err_msg) ants_reg_anat_mni = create_wf_calculate_ants_warp( f"anat_mni_ants_register{symm}", @@ -1703,7 +1703,7 @@ def ANTs_registration_connector( "ANTs" ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}") + lesion_preproc = create_lesion_preproc(cfg, wf_name=f"lesion_preproc{symm}") wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") wf.connect( lesion_preproc, @@ -1756,7 +1756,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1802,7 +1802,7 @@ def ANTs_registration_connector( write_composite_invlinear_xfm.inputs.dimension = 3 collect_inv_transforms = pe.Node( - util.Merge(3), name="collect_inv_transforms" f"{symm}" + util.Merge(3), name=f"collect_inv_transforms{symm}" ) wf.connect( @@ -1819,7 +1819,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1841,7 +1841,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -1882,7 +1882,7 @@ def ANTs_registration_connector( write_composite_xfm.inputs.dimension = 3 collect_all_transforms = pe.Node( - util.Merge(4), name=f"collect_all_transforms" f"{symm}" + util.Merge(4), name=f"collect_all_transforms{symm}" ) wf.connect( @@ -1903,7 +1903,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1939,7 +1939,7 @@ def ANTs_registration_connector( write_composite_inv_xfm.inputs.dimension = 3 collect_all_inv_transforms = pe.Node( - util.Merge(4), name=f"collect_all_inv_transforms" f"{symm}" + util.Merge(4), name=f"collect_all_inv_transforms{symm}" ) wf.connect( @@ -1972,7 +1972,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1994,7 +1994,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -2053,6 +2053,7 @@ def ANTs_registration_connector( def bold_to_T1template_xfm_connector( wf_name, cfg, reg_tool, symmetric=False, blip=False ): + """Transform functional to T1w template.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -2089,7 +2090,7 @@ def bold_to_T1template_xfm_connector( itk_imports = ["import os"] change_transform = pe.Node( - 
util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -2253,8 +2254,9 @@ def bold_to_T1template_xfm_connector( }, ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to template with FSL.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_anat_to_" f"template_{pipe_num}", cfg, orig="T1w", opt=opt + f"register_{opt}_anat_to_template_{pipe_num}", cfg, orig="T1w", opt=opt ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ @@ -2339,8 +2341,9 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to symmetric template with FSL.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_anat_to_" f"template_symmetric_" f"{pipe_num}", + f"register_{opt}_anat_to_template_symmetric_{pipe_num}", cfg, orig="T1w", opt=opt, @@ -2412,11 +2415,9 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N }, ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Directly register the mean functional to an EPI template. No T1w - involved. - """ + """Directly register the mean functional to an EPI template. No T1w involved.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_EPI_to_" f"template_{pipe_num}", + f"register_{opt}_EPI_to_template_{pipe_num}", cfg, orig="bold", opt=opt, @@ -2551,12 +2552,13 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to template with ANTs.""" params = cfg.registration_workflows["anatomical_registration"]["registration"][ "ANTs" ]["T1_registration"] ants_rc, outputs = ANTs_registration_connector( - "ANTS_T1_to_template_" f"{pipe_num}", cfg, params, orig="T1w" + f"ANTS_T1_to_template_{pipe_num}", cfg, params, orig="T1w" ) ants_rc.inputs.inputspec.interpolation = cfg.registration_workflows[ @@ -2617,6 +2619,7 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) outputs[new_key] = outputs[key] del outputs[key] + return (wf, outputs) @@ -2684,12 +2687,13 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1 to symmetric template with ANTs.""" params = cfg.registration_workflows["anatomical_registration"]["registration"][ "ANTs" ]["T1_registration"] ants, outputs = ANTs_registration_connector( - "ANTS_T1_to_template_" f"symmetric_{pipe_num}", + f"ANTS_T1_to_template_symmetric_{pipe_num}", cfg, params, orig="T1w", @@ -2773,15 +2777,13 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= }, ) def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Directly register the mean functional to an EPI template. No T1w - involved. - """ + """Directly register the mean functional to an EPI template. 
No T1w involved.""" params = cfg.registration_workflows["functional_registration"]["EPI_registration"][ "ANTs" ]["parameters"] ants, outputs = ANTs_registration_connector( - "ANTS_bold_to_EPI-template" f"_{pipe_num}", + f"ANTS_bold_to_EPI-template_{pipe_num}", cfg, params, orig="bold", @@ -2855,6 +2857,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Overwrite ANTs transforms with FSL transforms.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -2862,7 +2865,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None if opt.lower() == "fsl" and reg_tool.lower() == "ants": # Apply head-to-head transforms on brain using ABCD-style registration # Convert ANTs warps to FSL warps to be consistent with the functional registration - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/e8d373d/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 # antsApplyTransforms -d 3 -i ${T1wRestore}.nii.gz -r ${Reference} \ # -t ${WD}/xfms/T1w_to_MNI_3Warp.nii.gz \ @@ -2917,8 +2920,8 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple split_combined_warp = pe.Node( - util.Function( - input_names=["input", "output_name"], + Function( + input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, ), @@ -2927,13 +2930,16 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None split_combined_warp.inputs.output_name = "e" wf.connect( - ants_apply_warp_t1_to_template, "output_image", split_combined_warp, "input" + ants_apply_warp_t1_to_template, + "output_image", + split_combined_warp, + "input_name", ) # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz split_combined_inv_warp = pe.Node( - util.Function( - input_names=["input", "output_name"], + Function( + input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, ), @@ -2945,7 +2951,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None ants_apply_warp_template_to_t1, "output_image", split_combined_inv_warp, - "input", + "input_name", ) # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz @@ -3091,6 +3097,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): + """Create single-band reference for coreg by selecting a functional volume.""" get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( @@ -3134,6 +3141,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): + """Create single-band reference for coregistration from mean BOLD.""" coreg_input = strat_pool.get_data("desc-mean_bold") # TODO add mean skull @@ -3174,6 +3182,7 @@ def coregistration_prep_mean(wf, cfg, 
strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate fMRIPrep-style single-band reference for coregistration.""" coreg_input = strat_pool.get_data("desc-ref_bold") outputs = {"sbref": coreg_input} @@ -3214,6 +3223,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): + """Coregister BOLD to T1w.""" diff_complete = False if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( "fieldmap-mask" @@ -3223,10 +3233,10 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: # monkey data func_to_anat = create_register_func_to_anat_use_T2( - cfg, f"func_to_anat_FLIRT_" f"{pipe_num}" + cfg, f"func_to_anat_FLIRT_{pipe_num}" ) - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc func_mc_mean = pe.Node( interface=afni_utils.TStat(), name=f"func_motion_corrected_mean_{pipe_num}" @@ -3253,7 +3263,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): # if field map-based distortion correction is on, but BBR is off, # send in the distortion correction files here func_to_anat = create_register_func_to_anat( - cfg, diff_complete, f"func_to_anat_FLIRT_" f"{pipe_num}" + cfg, diff_complete, f"func_to_anat_FLIRT_{pipe_num}" ) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ @@ -3311,7 +3321,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): ]["run"] ): func_to_anat_bbreg = create_bbregister_func_to_anat( - diff_complete, f"func_to_anat_" f"bbreg_" f"{pipe_num}" + diff_complete, f"func_to_anat_bbreg_{pipe_num}" ) func_to_anat_bbreg.inputs.inputspec.bbr_schedule = cfg.registration_workflows[ "functional_registration" @@ -3429,14 +3439,15 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - """Condense the BOLD-to-T1 coregistration transform and the T1-to-template - transform into one transform matrix. + """Create a single transform from BOLD-to-T1 coregistration and T1-to-template. + + Condense the BOLD-to-T1 coregistration transform and the T1-to-template transform into one transform matrix. """ xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) xfm, outputs = bold_to_T1template_xfm_connector( - "create_func_to_T1w" f"template_xfm_{pipe_num}", cfg, reg_tool, symmetric=False + f"create_func_to_T1wtemplate_xfm_{pipe_num}", cfg, reg_tool, symmetric=False ) node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") @@ -3506,14 +3517,16 @@ def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - """Condense the BOLD-to-T1 coregistration transform and the T1-to- - symmetric-template transform into one transform matrix. + """Create a single transform from coregistration and T1-to-symmetric-template. + + Condense the BOLD-to-T1 coregistration transform and the T1-to-symmetric-template + transform into one transform matrix. 
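+
+    Composing the transforms up front means downstream resampling can take
+    functional data to the symmetric template in a single interpolation
+    step, rather than interpolating once per transform.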
""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-symtemplate_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) xfm, outputs = bold_to_T1template_xfm_connector( - "create_func_to_T1wsymtem" f"plate_xfm_{pipe_num}", + f"create_func_to_T1wsymtemplate_xfm_{pipe_num}", cfg, reg_tool, symmetric=True, @@ -3576,6 +3589,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N outputs=["sbref", "desc-preproc_bold", "desc-stc_bold", "bold"], ) def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + """Apply phasediff to timeseries.""" outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if not strat_pool.check_rpool("despiked-fieldmap"): return (wf, outputs) @@ -3606,7 +3620,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -3713,6 +3727,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= outputs=["desc-preproc_bold", "desc-stc_bold", "bold"], ) def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + """Apply blip to timeseries.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3753,8 +3768,6 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) "functional_registration" ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] - connect = strat_pool.get_data("desc-preproc_bold") - if opt == "default": node, out = strat_pool.get_data("desc-preproc_bold") out_label = "desc-preproc_bold" @@ -3792,6 +3805,7 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}}, ) def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp T1 head to template.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3845,6 +3859,7 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}}, ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp T1 mask to template.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3905,6 +3920,7 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to T1 template.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3967,6 +3983,7 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to T1 template at derivative resolution.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -4047,11 +4064,15 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None) }, ) def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): 
- # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries using ABCD-style registration - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 + """Apply motion correction, coreg, anat-to-template transforms... - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 - # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz + ...on raw functional timeseries using ABCD-style registration. + + Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 + + https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 + convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz + """ convert_func_to_anat_linear_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" ) @@ -4072,7 +4093,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L140 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L140 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} convert_func_to_standard_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" @@ -4114,7 +4135,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ) # TODO add condition: if no gradient distortion - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/6466b78/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node( interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" @@ -4135,7 +4156,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") @@ -4266,7 +4287,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol merge_func_mask_to_standard = pe.Node( - interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}" ) merge_func_mask_to_standard.inputs.dimension = "t" @@ -4327,7 +4348,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") - # 
https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1214767/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") @@ -4416,11 +4437,16 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=None): - # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries - # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh + """ + Apply motion correction, coreg, anat-to-template transforms... - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 - # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} + ...on raw functional timeseries. + + Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh + + https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L131 + ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} + """ anat_resample = pe.Node( interface=fsl.FLIRT(), name=f"anat_resample_func_res_{pipe_num}" ) @@ -4450,7 +4476,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(node, out, applywarp_anat_res, "in_file") wf.connect(anat_resample, "out_file", applywarp_anat_res, "ref_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 # Create brain masks in this space (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz applywarp_anat_mask_res = pe.Node( @@ -4498,7 +4524,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(applywarp_bias_field_res, "out_file", biasfield_thr, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L144-L146 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L144-L146 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} convert_func_to_standard_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" @@ -4517,7 +4543,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_anat_res, "out_file", convert_func_to_standard_warp, "reference" ) - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 + # 
https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node( interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" @@ -4538,7 +4564,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L173 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") @@ -4656,7 +4682,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol merge_func_mask_to_standard = pe.Node( - interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}" ) merge_func_mask_to_standard.inputs.dimension = "t" @@ -4677,7 +4703,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(3), name=f"merge_operand_files_{pipe_num}") @@ -4760,49 +4786,49 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No def single_step_resample_timeseries_to_T1template( wf, cfg, strat_pool, pipe_num, opt=None ): - """ - Apply motion correction, coreg, anat-to-template transforms on - slice-time corrected functional timeseries based on fMRIPrep - pipeline - - Copyright (c) 2015-2018, the CRN developers team. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - * Neither the name of fmriprep nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE - COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. + """Apply motion correction, coreg, anat-to-template transforms... + + ...on slice-time corrected functional timeseries based on fMRIPrep pipeline. Ref: https://github.com/nipreps/fmriprep/blob/84a6005b/fmriprep/workflows/bold/resampling.py#L159-L419 - """ # noqa: 501 + """ + # Copyright (c) 2015-2018, the CRN developers team. + # All rights reserved. + + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: + + # * Redistributions of source code must retain the above copyright + # notice, this list of conditions and the following disclaimer. + + # * Redistributions in binary form must reproduce the above copyright + # notice, this list of conditions and the following disclaimer in the + # documentation and/or other materials provided with the distribution. + + # * Neither the name of fmriprep nor the names of its contributors + # may be used to endorse or promote products derived from this + # software without specific prior written permission. + + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + + # Modifications copyright (C) 2021 - 2024 C-PAC Developers xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) bbr2itk = pe.Node( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4843,7 +4869,7 @@ def single_step_resample_timeseries_to_T1template( ### Loop starts! 
### motionxfm2itk = pe.MapNode( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4864,7 +4890,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": convert_transform = pe.Node( - util.Function( + Function( input_names=["one_d_filename"], output_names=["transform_directory"], function=one_d_to_mat, @@ -5055,6 +5081,7 @@ def single_step_resample_timeseries_to_T1template( }, ) def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp single-band reference to T1 template.""" xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5098,6 +5125,7 @@ def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp BOLD mask to T1 template.""" xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5143,8 +5171,9 @@ def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - """Transform the BOLD mask to template space and to the resolution set for - the derivative outputs. + """Transform the BOLD mask to template space... + + ...and to the resolution set for the derivative outputs. """ xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( @@ -5181,6 +5210,7 @@ def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-preproc_bold": {"Template": "EPI-template"}}, ) def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to EPI template.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm, resource = warp_resource_to_template( wf, cfg, strat_pool, pipe_num, "desc-preproc_bold", xfm, time_series=True @@ -5204,6 +5234,7 @@ def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-mean_bold": {"Template": "EPI-template"}}, ) def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp mean BOLD to EPI template space.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, cfg, strat_pool, pipe_num, "desc-mean_bold", xfm, time_series=False @@ -5227,6 +5258,7 @@ def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-bold_mask": {"Template": "EPI-template"}}, ) def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp BOLD mask to EPI tempalate.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5258,8 +5290,9 @@ def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - """Transform the BOLD mask to template space and to the resolution set for - the derivative outputs. + """Transform the BOLD mask to EPI template space... + + ...and to the resolution set for the derivative outputs. 
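+
+    (EPI-template analogue of ``warp_deriv_mask_to_T1template`` above.)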
""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( @@ -5299,12 +5332,13 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp tissue masks to T1 template.""" return warp_tissuemask_to_template( wf, cfg, strat_pool, pipe_num, - xfm="from-T1w_to-template_mode-image_" "xfm", + xfm="from-T1w_to-template_mode-image_xfm", template_space="T1", ) @@ -5333,18 +5367,19 @@ def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp tissue masks to EPI template.""" return warp_tissuemask_to_template( wf, cfg, strat_pool, pipe_num, - xfm="from-bold_to-EPItemplate_" "mode-image_xfm", + xfm="from-bold_to-EPItemplate_mode-image_xfm", template_space="EPI", ) def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_space): - """Function to apply transforms to tissue masks + """Apply transforms to tissue masks. Parameters ---------- @@ -5392,12 +5427,12 @@ def warp_resource_to_template( cfg, strat_pool, pipe_num: int, - input_resource: LIST_OR_STR, + input_resource: list[str] | str, xfm: str, reference: Optional[str] = None, time_series: Optional[bool] = False, -) -> TUPLE[pe.Workflow, pe.Workflow, str]: - """Function to warp a resource into a template space +) -> tuple[pe.Workflow, pe.Workflow, str]: + """Warp a resource into a template space. Parameters ---------- @@ -5460,7 +5495,7 @@ def warp_resource_to_template( ) # set up 'apply_transform' subworkflow apply_xfm = apply_transform( - f"warp_{subwf_input_name}_to_" f"{template_space}template_{pipe_num}", + f"warp_{subwf_input_name}_to_{template_space}template_{pipe_num}", reg_tool, time_series=time_series, num_cpus=cfg.pipeline_setup["system_config"]["max_cores_per_participant"], @@ -5483,8 +5518,8 @@ def warp_resource_to_template( def _warp_return( wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], outputs: dict -) -> TUPLE[pe.Workflow, dict]: - """Check if we have a transform to apply; if not, don't add the outputs""" +) -> tuple[pe.Workflow, dict]: + """Check if we have a transform to apply; if not, don't add the outputs.""" if apply_xfm is None: return wf, {} return wf, outputs diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index 0bdf2f678b..4f35595abd 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -11,7 +11,8 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" + util.IdentityInterface(fields=["file"]), + name=f"file_node_{file_node_num}", ) input_node.inputs.file = path return input_node, "file" @@ -150,6 +151,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/registration/tests/test_registration.py b/CPAC/registration/tests/test_registration.py index 4b8edea0cd..d8e8228497 100755 --- a/CPAC/registration/tests/test_registration.py +++ b/CPAC/registration/tests/test_registration.py @@ -22,15 +22,12 @@ def test_nonlinear_register(): ## input_skull ## reference_brain - mni_file = 
"/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" ## reference_skull ## fnirt_config - fnirt_config = "T1_2_MNI152_3mm" ## fnirt_warp_res - fnirt_warp_res = None # ?? what is this for?: func_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" @@ -133,7 +130,7 @@ def test_registration_lesion(): anat_preproc.inputs.inputspec.anat = anat_file - lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc") + lesion_preproc = create_lesion_preproc(cfg, wf_name="lesion_preproc") lesion_preproc.inputs.inputspec.lesion = lesion_file diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index a2c0afe6bb..869937df04 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -11,6 +11,9 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template + + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" + desired_orientation: RPI output_directory: # Quality control outputs diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index e73a4d7bc0..048cbe9b1c 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -14,11 +14,12 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Tests for packaged templates""" +"""Tests for packaged templates.""" import os import pytest +import nipype.pipeline.engine as pe from CPAC.pipeline import ALL_PIPELINE_CONFIGS from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool @@ -29,14 +30,14 @@ @pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS) def test_packaged_path_exists(pipeline): """ - Check that all local templates are included in image at at - least one resolution + Check that all local templates are included in image at atleast one resolution. """ - rpool = ingress_pipeconfig_paths( - Preconfiguration(pipeline), ResourcePool(), "pytest" + wf = pe.Workflow(name="test") + wf, rpool = ingress_pipeconfig_paths( + wf, Preconfiguration(pipeline), ResourcePool(), "pytest" ) for resource in rpool.rpool.values(): - node = list(resource.values())[0].get("data")[0] + node = next(iter(resource.values())).get("data")[0] if hasattr(node.inputs, "template") and not node.inputs.template.startswith( "s3:" ): diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 439d09ab61..25adb1eeca 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2024 C-PAC Developers # This file is part of C-PAC. @@ -14,13 +14,14 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
+"""Utilities for sourcing data.""" + import csv import json from pathlib import Path import re -from typing import Union -from nipype import logging +from voluptuous import RequiredFieldInvalid from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -28,15 +29,12 @@ from CPAC.utils import function from CPAC.utils.bids_utils import bids_remove_entity from CPAC.utils.interfaces.function import Function -from CPAC.utils.typing import TUPLE +from CPAC.utils.monitoring import FMLOGGER from CPAC.utils.utils import get_scan_params -logger = logging.getLogger("nipype.workflow") - def bidsier_prefix(unique_id): - """ - Function to return a BIDSier prefix for a given unique_id + """Return a BIDSier prefix for a given unique_id. Parameters ---------- @@ -67,8 +65,7 @@ def bidsier_prefix(unique_id): def get_rest(scan, rest_dict, resource="scan"): - """Return the file path of the chosen resource stored in the functional - file dictionary, if it exists. + """Return the path of the chosen resource in the functional file dictionary. scan: the scan/series name or label rest_dict: the dictionary read in from the data configuration YAML file @@ -88,25 +85,20 @@ def get_rest(scan, rest_dict, resource="scan"): def extract_scan_params_dct(scan_params_dct): + """Extract the scan parameters dictionary from the data configuration file.""" return scan_params_dct -def get_map(map, map_dct): - # return the spatial map required - return map_dct[map] - - def select_model_files(model, ftest, model_name): - """ - Method to select model files - """ + """Select model files.""" import glob import os files = glob.glob(os.path.join(model, "*")) if len(files) == 0: - raise Exception("No files found inside directory %s" % model) + msg = f"No files found inside directory {model}" + raise FileNotFoundError(msg) fts_file = "" @@ -120,25 +112,25 @@ def select_model_files(model, ftest, model_name): elif (model_name + ".con") in filename: con_file = filename - if ftest == True and fts_file == "": + if ftest and fts_file == "": errmsg = ( "\n[!] CPAC says: You have f-tests included in your group " - "analysis model '%s', but no .fts files were found in the " - "output folder specified for group analysis: %s.\n\nThe " + f"analysis model '{model_name}', but no .fts files were found in the " + f"output folder specified for group analysis: {model}.\n\nThe " ".fts file is automatically generated by CPAC, and if you " "are seeing this error, it is because something went wrong " "with the generation of this file, or it has been moved." - "\n\n" % (model_name, model) ) - raise Exception(errmsg) + raise FileNotFoundError(errmsg) return fts_file, con_file, grp_file, mat_file def check_func_scan(func_scan_dct, scan): - """Run some checks on the functional timeseries-related files for a given - series/scan name or label. + """Run some checks on the functional timeseries-related files. + + For a given series/scan name or label. """ scan_resources = func_scan_dct[scan] @@ -154,7 +146,7 @@ def check_func_scan(func_scan_dct, scan): " scan parameters: /path/to/scan_params.json\n\n" "See the User Guide for more information.\n\n" ) - raise Exception(err) + raise ValueError(err) # actual 4D time series file if "scan" not in scan_resources.keys(): @@ -163,21 +155,23 @@ def check_func_scan(func_scan_dct, scan): "scan file, which should be a filepath labeled with the " "'scan' key.\n\n" ) - raise Exception(err) + raise FileNotFoundError(err) # Nipype restriction (may have changed) if "." 
in scan or "+" in scan or "*" in scan: - raise Exception( + msg = ( "\n\n[!] Scan names cannot contain any special " "characters (., +, *, etc.). Please update this " f"and try again.\n\nScan: {scan}" "\n\n" ) + raise ValueError(msg) def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): - """Return the functional timeseries-related file paths for each - series/scan, from the dictionary of functional files described in the data + """Return the functional timeseries-related file paths for each series/scan... + + ...from the dictionary of functional files described in the data configuration (sublist) YAML file. Scan input (from inputnode) is an iterable. @@ -288,8 +282,10 @@ def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): - """Return the field map files, from the dictionary of functional files - described in the data configuration (sublist) YAML file. + """Return the field map files... + + ...from the dictionary of functional files described in the data configuration + (sublist) YAML file. """ import nipype.interfaces.utility as util @@ -379,6 +375,7 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): def get_fmap_phasediff_metadata(data_config_scan_params): + """Return the scan parameters for a field map phasediff scan.""" if ( not isinstance(data_config_scan_params, dict) and ".json" in data_config_scan_params @@ -411,11 +408,10 @@ def get_fmap_phasediff_metadata(data_config_scan_params): ) -@Function.sig_imports(["from CPAC.utils.typing import TUPLE"]) def calc_delta_te_and_asym_ratio( effective_echo_spacing: float, echo_times: list -) -> TUPLE[float, float]: - """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata +) -> tuple[float, float]: + """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata. Parameters ---------- @@ -431,15 +427,19 @@ def calc_delta_te_and_asym_ratio( ees_asym_ratio : float """ if not isinstance(effective_echo_spacing, float): - raise LookupError( + msg = ( "C-PAC could not find `EffectiveEchoSpacing` in " "either fmap or func sidecar JSON, but that field " "is required for PhaseDiff distortion correction." ) + raise LookupError(msg) # convert into milliseconds if necessary # these values will/should never be more than 10ms - if ((echo_times[0] * 1000) < 10) and ((echo_times[1] * 1000) < 10): + if ( + ((echo_times[0] * 1000) < 10) # noqa: PLR2004 + and ((echo_times[1] * 1000) < 10) # noqa: PLR2004 + ): echo_times[0] = echo_times[0] * 1000 echo_times[1] = echo_times[1] * 1000 @@ -448,16 +448,17 @@ def calc_delta_te_and_asym_ratio( return deltaTE, ees_asym_ratio -def gather_echo_times(echotime_1, echotime_2=None, echotime_3=None, echotime_4=None): +def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None): + """Gather the echo times from the field map data.""" echotime_list = [echotime_1, echotime_2, echotime_3, echotime_4] echotime_list = list(filter(lambda item: item is not None, echotime_list)) echotime_list = list(set(echotime_list)) - if len(echotime_list) != 2: - raise Exception( - "\n[!] Something went wrong with the field map echo " - "times - there should be two distinct values.\n\n" - f"Echo Times:\n{echotime_list}\n" + if len(echotime_list) != 2: # noqa: PLR2004 + msg = ( + "\n[!] 
Something went wrong with the field map echo times - there should" + f" be two distinct values.\n\nEcho Times:\n{echotime_list}\n" ) + raise ValueError(msg) return echotime_list @@ -468,7 +469,9 @@ def match_epi_fmaps( epi_fmap_two=None, epi_fmap_params_two=None, ): - """Parse the field map files in the data configuration and determine which + """Match EPI field maps to the BOLD scan. + + Parse the field map files in the data configuration and determine which ones have the same and opposite phase-encoding directions as the BOLD scan in the current pipeline. @@ -520,6 +523,7 @@ def ingress_func_metadata( unique_id=None, num_strat=None, ): + """Ingress metadata for functional scans.""" name_suffix = "" for suffix_part in (unique_id, num_strat): if suffix_part is not None: @@ -531,18 +535,18 @@ def ingress_func_metadata( fmap_TE_list = [] if "fmap" in sub_dict: second = False - for key in sub_dict["fmap"]: + for orig_key in sub_dict["fmap"]: gather_fmap = create_fmap_datasource( - sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}" + sub_dict["fmap"], f"fmap_gather_{orig_key}_{subject_id}" ) gather_fmap.inputs.inputnode.set( subject=subject_id, creds_path=input_creds_path, dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - gather_fmap.inputs.inputnode.scan = key + gather_fmap.inputs.inputnode.scan = orig_key - orig_key = key + key = orig_key if "epi" in key and not second: key = "epi-1" second = True @@ -672,7 +676,7 @@ def ingress_func_metadata( input_names=["effective_echo_spacing", "echo_times"], output_names=["deltaTE", "ees_asym_ratio"], function=calc_delta_te_and_asym_ratio, - imports=["from typing import Optional, Tuple"], + imports=["from typing import Optional"], ), name=f"diff_distcor_calc_delta{name_suffix}", ) @@ -726,7 +730,6 @@ def ingress_func_metadata( "effective_echo_spacing", ], function=get_scan_params, - imports=["from CPAC.utils.utils import check, try_fetch_parameter"], ), name=f"bold_scan_params_{subject_id}{name_suffix}", ) @@ -803,6 +806,7 @@ def ingress_func_metadata( def create_general_datasource(wf_name): + """Create a general-purpose datasource node.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -846,6 +850,7 @@ def create_general_datasource(wf_name): def create_check_for_s3_node( name, file_path, img_type="other", creds_path=None, dl_dir=None, map_node=False ): + """Create a node to check if a file is on S3.""" if map_node: check_s3_node = pe.MapNode( function.Function( @@ -855,7 +860,7 @@ def create_check_for_s3_node( as_module=True, ), iterfield=["file_path"], - name="check_for_s3_%s" % name, + name=f"check_for_s3_{name}", ) else: check_s3_node = pe.Node( @@ -865,7 +870,7 @@ def create_check_for_s3_node( function=check_for_s3, as_module=True, ), - name="check_for_s3_%s" % name, + name=f"check_for_s3_{name}", ) check_s3_node.inputs.set( @@ -875,10 +880,10 @@ def create_check_for_s3_node( return check_s3_node -# Check if passed-in file is on S3 def check_for_s3( file_path, creds_path=None, dl_dir=None, img_type="other", verbose=False ): + """Check if passed-in file is on S3.""" # Import packages import os @@ -903,8 +908,7 @@ def check_for_s3( # TODO: remove this once scan parameter input as dictionary is phased out if isinstance(file_path, dict): # if this is a dictionary, just skip altogether - local_path = file_path - return local_path + return file_path if file_path.lower().startswith(s3_str): file_path = s3_str + file_path[len(s3_str) :] @@ -922,42 +926,45 @@ def check_for_s3( 
os.makedirs(local_dir, exist_ok=True) if os.path.exists(local_path): - print(f"{local_path} already exists- skipping download.") + FMLOGGER.info("%s already exists- skipping download.", local_path) else: # Download file try: bucket = fetch_creds.return_bucket(creds_path, bucket_name) - print(f"Attempting to download from AWS S3: {file_path}") + FMLOGGER.info("Attempting to download from AWS S3: %s", file_path) bucket.download_file(Key=s3_key, Filename=local_path) except botocore.exceptions.ClientError as exc: error_code = int(exc.response["Error"]["Code"]) err_msg = str(exc) - if error_code == 403: + if error_code == 403: # noqa: PLR2004 err_msg = ( - 'Access to bucket: "%s" is denied; using credentials ' - 'in subject list: "%s"; cannot access the file "%s"' - % (bucket_name, creds_path, file_path) + f'Access to bucket: "{bucket_name}" is denied; using' + f' credentials in subject list: "{creds_path}"; cannot access' + f' the file "{file_path}"' ) - elif error_code == 404: + error_type = PermissionError + elif error_code == 404: # noqa: PLR2004 err_msg = ( - f"File: {os.path.join(bucket_name, s3_key)} does not exist; check spelling and try " - "again" + f"File: {os.path.join(bucket_name, s3_key)} does not exist;" + " check spelling and try again" ) + error_type = FileNotFoundError else: err_msg = ( - 'Unable to connect to bucket: "%s". Error message:\n%s' - % (bucket_name, exc) + f'Unable to connect to bucket: "{bucket_name}". Error message:' + f"\n{exc}" ) + error_type = ConnectionError - raise Exception(err_msg) + raise error_type(err_msg) except Exception as exc: - err_msg = 'Unable to connect to bucket: "%s". Error message:\n%s' % ( - bucket_name, - exc, + err_msg = ( + f'Unable to connect to bucket: "{bucket_name}". Error message:' + f"\n{exc}" ) - raise Exception(err_msg) + raise ConnectionError(err_msg) # Otherwise just return what was passed in, resolving if a link else: @@ -983,51 +990,48 @@ def check_for_s3( ] ) if local_path in ndmg_atlases["v0"]: - raise FileNotFoundError( - "".join( - [ - "Neuroparc atlas paths were updated on July 20, 2020. " - "C-PAC configuration files using Neuroparc v0 atlas paths " - "(including C-PAC default and preconfigured pipeline " - "configurations from v1.6.2a and earlier) need to be " - "updated to use Neuroparc atlases. Your current " - "configuration includes the Neuroparc v0 path " - f"{local_path} which needs to be updated to ", - ndmg_atlases["v1"][ndmg_atlases["v0"].index(local_path)], - ". For a full list such paths, see https://fcp-indi." - "github.io/docs/nightly/user/ndmg_atlases", - ] - ) + from CPAC.utils.docs import DOCS_URL_PREFIX + + msg = ( + "Neuroparc atlas paths were updated on July 20, 2020. C-PAC" + " configuration files using Neuroparc v0 atlas paths (including C-PAC" + " default and preconfigured pipeline configurations from v1.6.2a and" + " earlier) need to be updated to use Neuroparc atlases. Your current" + f" configuration includes the Neuroparc v0 path {local_path} which" + " needs to be updated to" + f" {ndmg_atlases['v1'][ndmg_atlases['v0'].index(local_path)]}. For a" + f" full list such paths, see {DOCS_URL_PREFIX}/user/ndmg_atlases" ) else: - raise FileNotFoundError(f"File {local_path} does not exist!") + msg = f"File {local_path} does not exist!" 
+ raise FileNotFoundError(msg) if verbose: - print(f"Downloaded file:\n{local_path}\n") + FMLOGGER.info("Downloaded file:\n%s\n", local_path) # Check image dimensionality if local_path.endswith(".nii") or local_path.endswith(".nii.gz"): img_nii = nib.load(local_path) if img_type == "anat": - if len(img_nii.shape) != 3: - raise IOError( - "File: %s must be an anatomical image with 3 " - "dimensions but %d dimensions found!" - % (local_path, len(img_nii.shape)) + if len(img_nii.shape) != 3: # noqa: PLR2004 + msg = ( + f"File: {local_path} must be an anatomical image with 3 " + f"dimensions but {len(img_nii.shape)} dimensions found!" ) elif img_type == "func": if len(img_nii.shape) not in [3, 4]: - raise IOError( - "File: %s must be a functional image with 3 or " - "4 dimensions but %d dimensions found!" - % (local_path, len(img_nii.shape)) + msg = ( + f"File: {local_path} must be a functional image with 3 or " + f"4 dimensions but {len(img_nii.shape)} dimensions found!" ) + raise IOError(msg) return local_path def gather_extraction_maps(c): + """Gather the timeseries and SCA analysis configurations.""" ts_analysis_dict = {} sca_analysis_dict = {} @@ -1052,7 +1056,7 @@ def gather_extraction_maps(c): "set to run, but no ROI NIFTI file paths were " "provided!\n\n" ) - raise Exception(err) + raise RequiredFieldInvalid(err) if c.seed_based_correlation_analysis["run"]: try: @@ -1063,13 +1067,13 @@ def gather_extraction_maps(c): "is set to run, but no ROI NIFTI file paths were " "provided!\n\n" ) - raise Exception(err) + raise RequiredFieldInvalid(err) # flip the dictionary for roi_path in sca_roi_dict.keys(): # update analysis dict - for analysis_type in sca_roi_dict[roi_path].split(","): - analysis_type = analysis_type.replace(" ", "") + for _analysis_type in sca_roi_dict[roi_path].split(","): + analysis_type = _analysis_type.replace(" ", "") if analysis_type not in sca_analysis_dict.keys(): sca_analysis_dict[analysis_type] = [] @@ -1079,8 +1083,10 @@ def gather_extraction_maps(c): return (ts_analysis_dict, sca_analysis_dict) -def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: - """Given a reference template path and a resolution string, get all +def get_highest_local_res(template: Path | str, tagname: str) -> Path: + """Return the highest resolution of a template in the same local path. + + Given a reference template path and a resolution string, get all resolutions of that template in the same local path and return the highest resolution. @@ -1128,12 +1134,12 @@ def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: try: return matching_templates[0] except (FileNotFoundError, IndexError): - raise LookupError(f"Could not find template {template}") + msg = f"Could not find template {template}" + raise LookupError(msg) def res_string_to_tuple(resolution): - """ - Converts a resolution string to a tuple of floats. + """Convert a resolution string to a tuple of floats. 
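+
+    For example, the single-value branch visible below turns ``"2mm"`` into
+    ``(2.0, 2.0, 2.0)``; a per-axis string (hypothetically, ``"2mmx2mmx3mm"``)
+    would instead yield one float per axis.
+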
Parameters ---------- @@ -1150,7 +1156,8 @@ def res_string_to_tuple(resolution): return (float(resolution.replace("mm", "")),) * 3 -def resolve_resolution(resolution, template, template_name, tag=None): +def resolve_resolution(orientation, resolution, template, template_name, tag=None): + """Resample a template to a given resolution.""" from nipype.interfaces import afni from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1196,6 +1203,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): resample.inputs.resample_mode = "Cu" resample.inputs.in_file = local_path resample.base_dir = "." + resample.inputs.orientation = orientation resampled_template = resample.run() local_path = resampled_template.outputs.out_file @@ -1204,6 +1212,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): def create_anat_datasource(wf_name="anat_datasource"): + """Create a dataflow for anatomical images.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1245,12 +1254,13 @@ def create_anat_datasource(wf_name="anat_datasource"): def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): + """Create a dataflow for ROI masks.""" import os mask_dict = {} - for mask_file in masks: - mask_file = mask_file.rstrip("\r\n") + for _mask_file in masks: + mask_file = _mask_file.rstrip("\r\n") if mask_file.strip() == "" or mask_file.startswith("#"): continue @@ -1263,22 +1273,23 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): try: valid_extensions = [".nii", ".nii.gz"] - base_name = [ + base_name = next( base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ) for key in ["res", "space"]: base_name = bids_remove_entity(base_name, key) except IndexError: # pylint: disable=raise-missing-from - raise ValueError( + msg = ( "Error in spatial_map_dataflow: File " f'extension of {base_file} not ".nii" or ' ".nii.gz" ) + raise ValueError(msg) except Exception as e: raise e @@ -1286,10 +1297,11 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): base_name = format_identifier(name, desc) if base_name in mask_dict: - raise ValueError( + msg = ( "Duplicate templates/atlases not allowed: " f"{mask_file} {mask_dict[base_name]}" ) + raise ValueError(msg) mask_dict[base_name] = mask_file @@ -1336,38 +1348,40 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): + """Create a dataflow for spatial maps.""" import os wf = pe.Workflow(name=wf_name) spatial_map_dict = {} - for spatial_map_file in spatial_maps: - spatial_map_file = spatial_map_file.rstrip("\r\n") + for _spatial_map_file in spatial_maps: + spatial_map_file = _spatial_map_file.rstrip("\r\n") base_file = os.path.basename(spatial_map_file) try: valid_extensions = [".nii", ".nii.gz"] - base_name = [ + base_name = next( base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ) if base_name in spatial_map_dict: - raise ValueError( - "Files with same name not allowed: %s %s" - % (spatial_map_file, spatial_map_dict[base_name]) + msg = ( + f"Files with same name not allowed: {spatial_map_file}" + f" {spatial_map_dict[base_name]}" ) + raise ValueError(msg) spatial_map_dict[base_name] = spatial_map_file except IndexError: - raise Exception( - "Error in spatial_map_dataflow: " - "File extension not in .nii and .nii.gz" + msg = ( + "Error in spatial_map_dataflow: File extension not in .nii and 
.nii.gz" ) + raise ValueError(msg) inputnode = pe.Node( util.IdentityInterface( @@ -1412,6 +1426,7 @@ def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): def create_grp_analysis_dataflow(wf_name="gp_dataflow"): + """Create a dataflow for group analysis.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1456,16 +1471,17 @@ def create_grp_analysis_dataflow(wf_name="gp_dataflow"): def resample_func_roi(in_func, in_roi, realignment, identity_matrix): + """Resample functional image to ROI or ROI to functional image using flirt.""" import os - import nibabel as nb + import nibabel as nib from CPAC.utils.monitoring.custom_logging import log_subprocess # load func and ROI dimension - func_img = nb.load(in_func) + func_img = nib.load(in_func) func_shape = func_img.shape - roi_img = nb.load(in_roi) + roi_img = nib.load(in_roi) roi_shape = roi_img.shape # check if func size = ROI size, return func and ROI; else resample using flirt diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index 084f299c0a..ea16c0be36 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -11,7 +11,8 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" + util.IdentityInterface(fields=["file"]), + name=f"file_node_{file_node_num}", ) input_node.inputs.file = path return input_node, "file" @@ -34,7 +35,7 @@ def configuration_strategy_mock(method="FSL"): "functional_registration": { "EPI_registration": { "FSL-FNIRT": { - "identity_matrix": f"{fsldir}/etc/flirtsch/" "ident.mat", + "identity_matrix": f"{fsldir}/etc/flirtsch/ident.mat", "interpolation": "sinc", } }, @@ -104,11 +105,11 @@ def configuration_strategy_mock(method="FSL"): ), "anatomical_brain": os.path.join( c["pipeline_setup", "output_directory", "path"], - "anatomical_brain/" "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", ), "ants_initial_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], - "ants_initial_xfm/" "transform0DerivedInitialMovingTranslation.mat", + "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat", ), "ants_affine_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], @@ -131,7 +132,7 @@ def configuration_strategy_mock(method="FSL"): ), "ants_symm_warp_field": os.path.join( c["pipeline_setup", "output_directory", "path"], - "anatomical_to_symmetric_mni_nonlinear_xfm/" "transform3Warp.nii.gz", + "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz", ), "ants_symm_affine_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], @@ -234,6 +235,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")} From 590b43711648fb628f5eabede555b5aaa8d6157a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 5 Nov 2024 12:47:38 -0500 Subject: [PATCH 138/507] adding f to f-string fixing typo --- CPAC/connectome/connectivity_matrix.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/connectome/connectivity_matrix.py b/CPAC/connectome/connectivity_matrix.py index c0be9f3f27..38c0411e1b 100644 --- 
a/CPAC/connectome/connectivity_matrix.py +++ b/CPAC/connectome/connectivity_matrix.py @@ -171,7 +171,7 @@ def create_connectome_afni(name, method, pipe_num): imports=["import subprocess"], function=strip_afni_output_header, ), - name="netcorrStripHeader{method}_{pipe_num}", + name=f"netcorrStripHeader{method}_{pipe_num}", ) name_output_node = pe.Node( From 043a004380075203971ad6c205200a1555389ded Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 7 Nov 2024 10:39:20 -0500 Subject: [PATCH 139/507] changing bold to desc-reorient_bold --- CPAC/nuisance/nuisance.py | 4 ++-- CPAC/registration/registration.py | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index e353aae03b..48a7686d7e 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -75,8 +75,8 @@ def choose_nuisance_blocks(cfg, rpool, generate_only=False): ] apply_transform_using = to_template_cfg["apply_transform"]["using"] input_interface = { - "default": ("desc-preproc_bold", ["desc-preproc_bold", "bold"]), - "abcd": ("desc-preproc_bold", "bold"), + "default": ("desc-preproc_bold", ["desc-preproc_bold", "desc-reorient_bold"]), + "abcd": ("desc-preproc_bold", "desc-reorient_bold"), "single_step_resampling_from_stc": ("desc-preproc_bold", "desc-stc_bold"), }.get(apply_transform_using) if input_interface is not None: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 33af078797..4848637d4d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3093,7 +3093,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "input", ], option_val="Selected_Functional_Volume", - inputs=[("desc-brain_bold", ["desc-motion_bold", "bold"], "sbref")], + inputs=[("desc-brain_bold", ["desc-motion_bold", "desc-reorient_bold"], "sbref")], outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): @@ -3115,7 +3115,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): else: # TODO check which file is functional_skull_leaf # TODO add a function to choose brain or skull? 
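+        # NOTE: the list below is a fallback order: "desc-motion_bold" is used
+        # when available, otherwise the reoriented BOLD (assuming
+        # strat_pool.get_data resolves list inputs in order).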
- node, out = strat_pool.get_data(["desc-motion_bold", "bold"]) + node, out = strat_pool.get_data(["desc-motion_bold", "desc-reorient_bold"]) wf.connect(node, out, get_func_volume, "in_file_a") @@ -3579,7 +3579,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N "sbref", "desc-preproc_bold", "desc-stc_bold", - "bold", + "desc-reorient_bold", "from-bold_to-T1w_mode-image_desc-linear_xfm", ), "despiked-fieldmap", @@ -3667,7 +3667,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= node, out = strat_pool.get_data("desc-stc_bold") out_label = "desc-stc_bold" elif opt == "abcd": - node, out = strat_pool.get_data("bold") + node, out = strat_pool.get_data("desc-reorient_bold") out_label = "bold" wf.connect(node, out, warp_bold, "in_file") @@ -3718,7 +3718,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= "sbref", "desc-preproc_bold", "desc-stc_bold", - "bold", + "desc-reorient_bold", "from-bold_to-template_mode-image_xfm", "ants-blip-warp", "fsl-blip-warp", @@ -3775,8 +3775,8 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) node, out = strat_pool.get_data("desc-stc_bold") out_label = "desc-stc_bold" elif opt == "abcd": - node, out = strat_pool.get_data("bold") - out_label = "bold" + node, out = strat_pool.get_data("desc-reorient_bold") + out_label = "desc-reorient_bold" wf.connect(node, out, apply_xfm, "inputspec.input_image") @@ -4421,7 +4421,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): option_val="dcan_nhp", inputs=[ ( - ["desc-reorient_bold", "bold"], + ["desc-reorient_bold", "desc-preproc_bold"], "coordinate-transformation", "from-T1w_to-template_mode-image_warp", "from-bold_to-T1w_mode-image_desc-linear_warp", @@ -4552,7 +4552,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"]) wf.connect(node, out, extract_func_roi, "in_file") # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp @@ -4570,7 +4570,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No split_func.inputs.dimension = "t" - node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"]) wf.connect(node, out, split_func, "in_file") ### Loop starts! 
### From 182649fea654594e25e03608bac0979e21c18caf Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Nov 2024 14:22:03 -0500 Subject: [PATCH 140/507] =?UTF-8?q?:truck:=20`.yml`=20=E2=86=92=20`.yaml`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [run reg-suite lite] --- .github/README/README.md | 10 +++++----- .../{build_and_test.yml => build_and_test.yaml} | 6 +++--- .github/workflows/{on_push.yml => on_push.yaml} | 2 +- .github/workflows/{regtest.yml => regtest.yaml} | 0 4 files changed, 9 insertions(+), 9 deletions(-) rename .github/workflows/{build_and_test.yml => build_and_test.yaml} (98%) rename .github/workflows/{on_push.yml => on_push.yaml} (99%) rename .github/workflows/{regtest.yml => regtest.yaml} (100%) diff --git a/.github/README/README.md b/.github/README/README.md index 158a762313..19e0a337d9 100644 --- a/.github/README/README.md +++ b/.github/README/README.md @@ -46,7 +46,7 @@ flowchart LR subgraph build_C-PAC.yml bCPAC[[C-PAC]] end - subgraph build_and_test.yml + subgraph build_and_test.yaml ubuntu[[Ubnutu]]-->stages[[stages]]-->build-base[[build-base]]-->build-base-standard[[build-base-standard]] Circle_tests[[Circle_tests]] @@ -65,7 +65,7 @@ flowchart LR smoke-tests-participant[[smoke-tests-participant]] end - on_push.yml-->build_and_test.yml + on_push.yaml-->build_and_test.yaml delete_images.yml end @@ -79,8 +79,8 @@ flowchart LR Circle_tests-->CircleCI((Run tests on Circle CI)) - on_push.yml<-->get_pr_base_shas - on_push.yml-->update_all_preconfigs + on_push.yaml<-->get_pr_base_shas + on_push.yaml-->update_all_preconfigs cpacdockerfiles<-->C-PAC @@ -94,7 +94,7 @@ flowchart LR bCPAC<-->local_ghcr stages<-->local_ghcr - push>git push]-->on_push.yml + push>git push]-->on_push.yaml smoke-tests-participant-->smoke_test_human smoke-tests-participant-->smoke_test_nhp diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yaml similarity index 98% rename from .github/workflows/build_and_test.yml rename to .github/workflows/build_and_test.yaml index ec3a76932c..c84619b2e4 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yaml @@ -190,7 +190,7 @@ jobs: run: | sed -i -e 's/^/\.github\/Dockerfiles\//' .github/stage_requirements/${{ matrix.variant }}.txt echo 'dev/docker_data/required_afni_pkgs.txt' >> .github/stage_requirements/${{ matrix.variant }}.txt - echo '.github/workflows/build_and_test.yml' >> .github/stage_requirements/${{ matrix.variant }}.txt + echo '.github/workflows/build_and_test.yaml' >> .github/stage_requirements/${{ matrix.variant }}.txt echo '.github/stage_requirements/${{ matrix.variant }}.txt' >> .github/stage_requirements/${{ matrix.variant }}.txt - name: Set tag & see if it exists continue-on-error: true @@ -262,7 +262,7 @@ jobs: run: | sed -i -e 's/^/\.github\/Dockerfiles\//' .github/stage_requirements/standard.txt echo 'dev/docker_data/required_afni_pkgs.txt' >> .github/stage_requirements/standard.txt - echo '.github/workflows/build_and_test.yml' >> .github/stage_requirements/standard.txt + echo '.github/workflows/build_and_test.yaml' >> .github/stage_requirements/standard.txt echo '.github/stage_requirements/standard.txt' >> .github/stage_requirements/standard.txt - name: Set tag & see if it exists continue-on-error: true @@ -343,7 +343,7 @@ jobs: - C-PAC secrets: inherit if: inputs.test_mode == 'lite' - uses: ./.github/workflows/regtest.yml + uses: ./.github/workflows/regtest.yaml with: test_mode: ${{ inputs.test_mode }} diff --git 
a/.github/workflows/on_push.yml b/.github/workflows/on_push.yaml similarity index 99% rename from .github/workflows/on_push.yml rename to .github/workflows/on_push.yaml index a84359edd3..9fbd8873d6 100644 --- a/.github/workflows/on_push.yml +++ b/.github/workflows/on_push.yaml @@ -136,7 +136,7 @@ jobs: needs: - check_pr - check-updated-preconfigs - uses: ./.github/workflows/build_and_test.yml + uses: ./.github/workflows/build_and_test.yaml secrets: inherit with: phase_one: ${{ needs.check-updated-preconfigs.outputs.phase_one }} diff --git a/.github/workflows/regtest.yml b/.github/workflows/regtest.yaml similarity index 100% rename from .github/workflows/regtest.yml rename to .github/workflows/regtest.yaml From 2f703facf6e4b03eef72c1c0190b4cef8b72abb9 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Nov 2024 14:39:00 -0500 Subject: [PATCH 141/507] :memo: Add automated integration & regression tests to CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..9882b59b3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). +- Updated GitHub Actions to run automated integration and regression tests on HPC. ### Fixed From c725a9db37c7bc29c53ce8ba3224f890a3f719cb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 11 Nov 2024 14:36:41 -0500 Subject: [PATCH 142/507] :bug: :alien: Patch `NetCorr._list_outputs` --- CPAC/utils/interfaces/netcorr.py | 55 ++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py index aee9a4d13d..728afdeef1 100644 --- a/CPAC/utils/interfaces/netcorr.py +++ b/CPAC/utils/interfaces/netcorr.py @@ -19,6 +19,61 @@ class NetCorr(NipypeNetCorr): # noqa: D101 input_spec = NetCorrInputSpec + def _list_outputs(self): + """``nipype.interfaces.afni.preprocess.NetCorr._list_outputs`` with a bugfix. + + Notes + ----- + This method can be removed once nipy/nipype#3697 is merged and a release + including that PR is included in the C-PAC image. + """ + # STATEMENT OF CHANGES: + # This function is derived from sources licensed under the Apache-2.0 terms, + # and this function has been changed. + + # CHANGES: + # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being perged. + + # ORIGINAL WORK'S ATTRIBUTION NOTICE: + # Copyright (c) 2009-2016, Nipype developers + + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + + # http://www.apache.org/licenses/LICENSE-2.0 + + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + + # Prior to release 0.12, Nipype was licensed under a BSD license. 
+ + # Modifications copyright (C) 2024 C-PAC Developers + import glob + import os + + from nipype.interfaces.base.traits_extension import isdefined + + outputs = self.output_spec().get() + + if not isdefined(self.inputs.out_file): + prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr") + else: + prefix = self.inputs.out_file + + # All outputs should be in the same directory as the prefix + odir = os.path.dirname(os.path.abspath(prefix)) + outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] + + if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z: + corrdir = os.path.join(odir, prefix + "_000_INDIV") + outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) + + return outputs + NetCorr.__doc__ = f"""{NipypeNetCorr.__doc__} `CPAC.utils.interfaces.netcorr.NetCorr` adds an additional optional input, `automask_off` From f1dac0c6d85c90f0dfe8aa9702a5764dabcdcc4c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 09:35:39 -0500 Subject: [PATCH 143/507] :pencil2: Fix "perged" typo in comment --- CPAC/utils/interfaces/netcorr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py index 728afdeef1..6af44a15ab 100644 --- a/CPAC/utils/interfaces/netcorr.py +++ b/CPAC/utils/interfaces/netcorr.py @@ -32,7 +32,7 @@ def _list_outputs(self): # and this function has been changed. # CHANGES: - # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being perged. + # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being merged and released. # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers From fc97ae86a0043f410fcd07a426351d2f5f16acdb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 5 Dec 2024 12:24:27 -0500 Subject: [PATCH 144/507] :goal_net: Alert missing resource trying to output Co-authored-by: Tamsin Rogers --- CPAC/pipeline/engine.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index bf31c957f7..a05b12b58f 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1357,6 +1357,9 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): wf.connect(id_string, "out_filename", nii_name, "format_string") node, out = self.rpool[resource][pipe_idx]["data"] + if not node: + msg = f"Resource {resource} not found in resource pool." + raise FileNotFoundError(msg) try: wf.connect(node, out, nii_name, "in_file") except OSError as os_error: From be61d7f392376ed41980287d3e89706d20f28caf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 10:46:21 -0500 Subject: [PATCH 145/507] :sparkles: Add inventory utility --- CPAC/pipeline/engine.py | 16 +- CPAC/pipeline/resource_inventory.py | 300 ++++++++++++++++++++++++++++ CPAC/utils/outputs.py | 26 ++- setup.py | 9 +- 4 files changed, 340 insertions(+), 11 deletions(-) create mode 100755 CPAC/pipeline/resource_inventory.py diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index be1d0c0c17..7494ae92ee 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2024 C-PAC Developers +# Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -17,6 +17,7 @@ import ast import copy import hashlib +from importlib.resources import files from itertools import chain import json import os @@ -24,6 +25,7 @@ from typing import Optional import warnings +import pandas as pd from nipype import config, logging from nipype.interfaces import afni from nipype.interfaces.utility import Rename @@ -2408,15 +2410,17 @@ def strip_template(data_label, dir_path, filename): return data_label, json +def template_dataframe() -> pd.DataFrame: + """Return the template dataframe.""" + template_csv = files("CPAC").joinpath("resources/cpac_templates.csv") + return pd.read_csv(str(template_csv), keep_default_na=False) + + def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well - import pandas as pd - import pkg_resources as p - - template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") - template_df = pd.read_csv(template_csv, keep_default_na=False) + template_df = template_dataframe() desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py new file mode 100755 index 0000000000..fc0abc2bfa --- /dev/null +++ b/CPAC/pipeline/resource_inventory.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Inspect inputs and outputs for NodeBlockFunctions.""" + +import ast +from dataclasses import dataclass, field +import importlib +from importlib.resources import files +import inspect +from itertools import chain +import os +from typing import Any, cast, Iterable + +import yaml + +from CPAC.pipeline.engine import template_dataframe +from CPAC.pipeline.nodeblock import NodeBlockFunction +from CPAC.utils.monitoring import UTLOGGER +from CPAC.utils.outputs import Outputs + + +def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]: + """ + Import all functions with the @nodeblock decorator from all modules and submodules in a package. + + Parameters + ---------- + package_name + The name of the package to import from. 
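+
+    Examples
+    --------
+    Hypothetically, ``import_nodeblock_functions("CPAC.anat_preproc")`` would
+    collect every ``@nodeblock``-decorated function defined in that subpackage
+    and its submodules.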
+ """ + functions: list[NodeBlockFunction] = [] + package = importlib.import_module(package_name) + package_path = package.__path__[0] # Path to the package directory + + for root, _, package_files in os.walk(package_path): + for file in package_files: + if file.endswith(".py") and file != "__init__.py": + # Get the module path + rel_path = os.path.relpath(os.path.join(root, file), package_path) + module_name = f"{package_name}.{rel_path[:-3].replace(os.sep, '.')}" + + # Import the module + try: + module = importlib.import_module(module_name) + except ImportError as e: + UTLOGGER.debug(f"Failed to import {module_name}: {e}") + continue + # Extract nodeblock-decorated functions from the module + for _name, obj in inspect.getmembers( + module, predicate=lambda obj: isinstance(obj, NodeBlockFunction) + ): + functions.append(obj) + + return functions + + +@dataclass +class ResourceSourceList: + """A list of resource sources without duplicates.""" + + sources: list[str] = field(default_factory=list) + + def __add__(self, other: str | list[str]) -> list[str]: + """Add a list of sources to the list.""" + if isinstance(other, str): + other = [other] + new_set = {*self.sources, *other} + return sorted(new_set, key=str.casefold) + + def __contains__(self, item: str) -> bool: + """Check if a source is in the list.""" + return item in self.sources + + def __delitem__(self, key: int) -> None: + """Delete a source by index.""" + del self.sources[key] + + def __eq__(self, value: Any) -> bool: + """Check if the lists of sources are the same.""" + return set(self) == set(value) + + def __getitem__(self, item: int) -> str: + """Get a source by index.""" + return self.sources[item] + + def __hash__(self) -> int: + """Get the hash of the list of sources.""" + return hash(self.sources) + + def __iadd__(self, other: str | list[str]) -> "ResourceSourceList": + """Add a list of sources to the list.""" + self.sources = self + other + return self + + def __iter__(self): + """Iterate over the sources.""" + return iter(self.sources) + + def __len__(self) -> int: + """Get the number of sources.""" + return len(self.sources) + + def __repr__(self) -> str: + """Get the reproducable string representation of the sources.""" + return f"ResourceSourceList({(self.sources)})" + + def __reversed__(self) -> list[str]: + """Get the sources reversed.""" + return list(reversed(self.sources)) + + def __setitem__(self, key: int, value: str) -> None: + """Set a source by index.""" + self.sources[key] = value + + def __sorted__(self) -> list[str]: + """Get the sources sorted.""" + return sorted(self.sources, key=str.casefold) + + def __str__(self) -> str: + """Get the string representation of the sources.""" + return str(self.sources) + + +@dataclass +class ResourceIO: + """NodeBlockFunctions that use a resource for IO.""" + + name: str + """The name of the resource.""" + output_from: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The functions that output the resource.""" + output_to: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The subdirectory the resource is output to.""" + input_for: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The functions that use the resource as input.""" + + def __post_init__(self) -> None: + """Handle optionals.""" + if isinstance(self.output_from, list): + self.output_from = ResourceSourceList(self.output_from) + if isinstance(self.output_to, list): + self.output_to = 
ResourceSourceList(self.output_to) + if isinstance(self.input_for, list): + self.input_for = ResourceSourceList(self.input_for) + + def __str__(self) -> str: + """Return string representation for ResourceIO instance.""" + return f"{{{self.name}: {{'input_for': {self.input_for!s}, 'output_from': {self.output_from!s}}}}})" + + def as_dict(self) -> dict[str, list[str]]: + """Return the ResourceIO as a built-in dictionary type.""" + return { + k: v + for k, v in { + "input_for": [str(source) for source in self.input_for], + "output_from": [str(source) for source in self.output_from], + "output_to": [str(source) for source in self.output_to], + }.items() + if v + } + + +def _flatten_io(io: list[Iterable]) -> list[str]: + """Given a list of strings or iterables thereof, flatten the list to all strings.""" + if all(isinstance(resource, str) for resource in io): + return cast(list[str], io) + while not all(isinstance(resource, str) for resource in io): + io = list( + chain.from_iterable( + [ + resource if not isinstance(resource, str) else [resource] + for resource in io + ] + ) + ) + return cast(list[str], io) + + +def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: + """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. + + Parameters + ---------- + package_name + The name of the package to search for resources. + + Returns + ------- + dict + A dictionary containing the name of the resource and the name of the functions that set it. + """ + resources: dict[str[list[str]]] = {} + for dirpath, _, filenames in os.walk(str(files(package_name))): + for filename in filenames: + if filename.endswith(".py"): + filepath = os.path.join(dirpath, filename) + with open(filepath, "r", encoding="utf-8") as file: + tree = ast.parse(file.read(), filename=filepath) + for node in ast.walk(tree): + if isinstance(node, ast.Call) and isinstance( + node.func, ast.Attribute + ): + if node.func.attr == "set_data": + try: + resource: str = ast.literal_eval(node.args[0]) + if resource not in resources: + resources[resource] = [] + resources[resource].append( + ast.literal_eval(node.args[-1]) + ) + except ValueError: + # The resource name or function name is not a literal, so this `set_data` is a dynamic call + pass + return resources + + +def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: + """Gather all inputs and outputs for a list of NodeBlockFunctions.""" + resources: dict[str, ResourceIO] = {} + # Node block function inputs and outputs + for nbf in import_nodeblock_functions(package): + nbf_name = f"{nbf.__module__}.{nbf.__qualname__}" + if hasattr(nbf, "inputs"): + for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)): + if nbf_input: + if nbf_input not in resources: + resources[nbf_input] = ResourceIO( + nbf_input, input_for=[nbf_name] + ) + else: + resources[nbf_input].input_for += nbf_name + if hasattr(nbf, "outputs"): + for nbf_output in _flatten_io(cast(list[Iterable], nbf.outputs)): + if nbf_output: + if nbf_output not in resources: + resources[nbf_output] = ResourceIO( + nbf_output, output_from=[nbf_name] + ) + else: + resources[nbf_output].output_from += nbf_name + # Template resources set from pipeline config + templates_from_config_df = template_dataframe() + for _, row in templates_from_config_df.iterrows(): + output_from = f"pipeline configuration: {row.Pipeline_Config_Entry}" + if row.Key not in resources: + resources[row.Key] = ResourceIO(row.Key, output_from=[output_from]) + else: + 
resources[row.Key].output_from += output_from
+    # Hard-coded resources
+    for resource, functions in find_directly_set_resources(package).items():
+        if resource not in resources:
+            resources[resource] = ResourceIO(resource, output_from=functions)
+        else:
+            resources[resource].output_from += functions
+    # Outputs
+    for _, row in Outputs.reference.iterrows():
+        if row.Resource not in resources:
+            resources[row.Resource] = ResourceIO(
+                row.Resource, output_to=[row["Sub-Directory"]]
+            )
+        else:
+            resources[row.Resource].output_to += row["Sub-Directory"]
+    return dict(sorted(resources.items(), key=lambda item: item[0].casefold()))
+
+
+def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str:
+    """Dump NodeBlock Interfaces to a YAML string."""
+    return yaml.dump(
+        {key: value.as_dict() for key, value in inventory.items()}, sort_keys=False
+    )
+
+
+def main() -> None:
+    """Print the NodeBlock IO to the console."""
+    UTLOGGER.info(dump_inventory_to_yaml(resource_inventory("CPAC")))  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()
diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py
index 11b81eb60f..451d893987 100644
--- a/CPAC/utils/outputs.py
+++ b/CPAC/utils/outputs.py
@@ -1,10 +1,30 @@
+# Copyright (C) 2018-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Specify the resources that C-PAC writes to the output directory."""
+
+from importlib.resources import files
+
 import pandas as pd
-import pkg_resources as p
 
 
 class Outputs:
-    # Settle some things about the resource pool reference and the output directory
-    reference_csv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
+    """Settle some things about the resource pool reference and the output directory."""
+
+    reference_csv = str(files("CPAC").joinpath("resources/cpac_outputs.tsv"))
 
     try:
         reference = pd.read_csv(reference_csv, delimiter="\t", keep_default_na=False)
diff --git a/setup.py b/setup.py
index 17919395d2..bb20b66c6c 100755
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers
 
 # This file is part of C-PAC. 
@@ -84,7 +84,12 @@ def main(**extra_args): extras_require={"graphviz": ["pygraphviz"]}, configuration=configuration, scripts=glob("scripts/*"), - entry_points={"console_scripts": ["cpac = CPAC.__main__:main"]}, + entry_points={ + "console_scripts": [ + "cpac = CPAC.__main__:main", + "C-PAC_nb_io = CPAC.pipeline.nb_io:main", + ] + }, package_data={ "CPAC": [ "test_data/*", From 5758eedad189fed6bde65a895d1a100095fcb296 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 10:48:00 -0500 Subject: [PATCH 146/507] =?UTF-8?q?:bug:=20Fix=20resource=20name:=20`unet?= =?UTF-8?q?=5Fmodel`=20=E2=86=92=20`unet-model`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/anat_preproc/anat_preproc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index a561f8e077..f4bd6f7049 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -2572,7 +2572,7 @@ def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None) config=["anatomical_preproc", "brain_extraction"], option_key="using", option_val="UNet", - inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"], + inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"], outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): @@ -2586,7 +2586,7 @@ def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): config=["anatomical_preproc", "brain_extraction"], option_key="using", option_val="UNet", - inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"], + inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"], outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): From 236e0febc0af3a34f26630260042793cc5240598 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 11:07:53 -0500 Subject: [PATCH 147/507] :children_crossing: Add CLI for resource inventory --- CPAC/pipeline/resource_inventory.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index fc0abc2bfa..28c9937cb5 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -17,6 +17,7 @@ # License along with C-PAC. If not, see . """Inspect inputs and outputs for NodeBlockFunctions.""" +from argparse import ArgumentParser, Namespace import ast from dataclasses import dataclass, field import importlib @@ -24,6 +25,7 @@ import inspect from itertools import chain import os +from pathlib import Path from typing import Any, cast, Iterable import yaml @@ -181,6 +183,22 @@ def as_dict(self) -> dict[str, list[str]]: } +def cli_parser() -> Namespace: + """Parse command line argument.""" + parser = ArgumentParser( + description="Inventory resources for C-PAC NodeBlockFunctions." 
+ ) + parser.add_argument( + "-o", + "--output", + nargs="?", + help="The output file to write the inventory to.", + type=Path, + default=Path("resource_inventory.yaml"), + ) + return parser.parse_args() + + def _flatten_io(io: list[Iterable]) -> list[str]: """Given a list of strings or iterables thereof, flatten the list to all strings.""" if all(isinstance(resource, str) for resource in io): @@ -292,8 +310,10 @@ def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str: def main() -> None: - """Print the NodeBlock IO to the console.""" - UTLOGGER.info(dump_inventory_to_yaml(resource_inventory("CPAC"))) # noqa: T201 + """Save the NodeBlock inventory to a file.""" + args = cli_parser() + with args.output.open("w") as file: + file.write(dump_inventory_to_yaml(resource_inventory("CPAC"))) if __name__ == "__main__": From 36528bdb81243550480cc1ab9f88887d36123d8e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 11:19:45 -0500 Subject: [PATCH 148/507] :children_crossing: Rename `resource_inventory` CLI command --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bb20b66c6c..f22a744e2d 100755 --- a/setup.py +++ b/setup.py @@ -87,7 +87,7 @@ def main(**extra_args): entry_points={ "console_scripts": [ "cpac = CPAC.__main__:main", - "C-PAC_nb_io = CPAC.pipeline.nb_io:main", + "resource_inventory = CPAC.pipeline.resource_inventory:main", ] }, package_data={ From 20ce4289028378c48c35a86987b1dd42a89fa1be Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 7 Jan 2025 14:14:59 -0500 Subject: [PATCH 149/507] :children_crossing: Specify default in helpstring --- CPAC/pipeline/resource_inventory.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 28c9937cb5..cee3570dc1 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -17,7 +17,7 @@ # License along with C-PAC. If not, see . """Inspect inputs and outputs for NodeBlockFunctions.""" -from argparse import ArgumentParser, Namespace +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace import ast from dataclasses import dataclass, field import importlib @@ -186,7 +186,8 @@ def as_dict(self) -> dict[str, list[str]]: def cli_parser() -> Namespace: """Parse command line argument.""" parser = ArgumentParser( - description="Inventory resources for C-PAC NodeBlockFunctions." 
+        description="Inventory resources for C-PAC NodeBlockFunctions.",
+        formatter_class=ArgumentDefaultsHelpFormatter,
     )
     parser.add_argument(
         "-o",

From 780600a2b4b0544c9fc8419c7a2be49b1a46fc2d Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 9 Jan 2025 10:44:32 -0500
Subject: [PATCH 150/507] :zap: Don't install torch just to look for
 NodeBlockFunctions

---
 CPAC/pipeline/resource_inventory.py | 31 ++++++++++++++++++++++++-----
 1 file changed, 26 insertions(+), 5 deletions(-)

diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py
index cee3570dc1..85090f533b 100755
--- a/CPAC/pipeline/resource_inventory.py
+++ b/CPAC/pipeline/resource_inventory.py
@@ -26,8 +26,10 @@
 from itertools import chain
 import os
 from pathlib import Path
-from typing import Any, cast, Iterable
+from typing import Any, cast, Iterable, Optional
+from unittest.mock import patch
 
+from traits.trait_errors import TraitError
 import yaml
 
 from CPAC.pipeline.engine import template_dataframe
@@ -36,7 +38,9 @@
 from CPAC.utils.outputs import Outputs
 
 
-def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]:
+def import_nodeblock_functions(
+    package_name: str, exclude: Optional[list[str]] = None
+) -> list[NodeBlockFunction]:
     """
     Import all functions with the @nodeblock decorator from all modules and submodules in a package.
 
@@ -44,7 +48,12 @@
     ----------
     package_name
         The name of the package to import from.
+
+    exclude
+        A list of module names to exclude from the import.
     """
+    if exclude is None:
+        exclude = []
     functions: list[NodeBlockFunction] = []
     package = importlib.import_module(package_name)
     package_path = package.__path__[0]  # Path to the package directory
@@ -55,11 +64,16 @@
             # Get the module path
             rel_path = os.path.relpath(os.path.join(root, file), package_path)
             module_name = f"{package_name}.{rel_path[:-3].replace(os.sep, '.')}"
+            if module_name in exclude:
+                continue
 
             # Import the module
             try:
-                module = importlib.import_module(module_name)
-            except ImportError as e:
+                with patch.dict(
+                    "sys.modules", {exclusion: None for exclusion in exclude}
+                ):
+                    module = importlib.import_module(module_name)
+            except (ImportError, TraitError, ValueError) as e:
                 UTLOGGER.debug(f"Failed to import {module_name}: {e}")
                 continue
             # Extract nodeblock-decorated functions from the module
@@ -258,7 +272,14 @@
     """Gather all inputs and outputs for a list of NodeBlockFunctions."""
    resources: dict[str, ResourceIO] = {}
     # Node block function inputs and outputs
-    for nbf in import_nodeblock_functions(package):
+    for nbf in import_nodeblock_functions(
+        package,
+        [
+            # No nodeblock functions in these modules that dynamically install torch 
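+            # (importing either module would trigger the dynamic torch
+            # installation this commit is meant to avoid)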
Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- `resource_inventory` utility to inventory NodeBlock function inputs and outputs. ### Changed From 38cdabe5844d4d32ec825e82a828f3db70b733f3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 15 Jan 2025 15:24:07 -0500 Subject: [PATCH 152/507] :sparkles: Pick up more hard-coded resources --- CPAC/pipeline/resource_inventory.py | 208 +++++++++++++++++++++++++--- 1 file changed, 188 insertions(+), 20 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 85090f533b..dfce0d1c2d 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -23,7 +23,7 @@ import importlib from importlib.resources import files import inspect -from itertools import chain +from itertools import chain, product import os from pathlib import Path from typing import Any, cast, Iterable, Optional @@ -91,9 +91,11 @@ class ResourceSourceList: sources: list[str] = field(default_factory=list) - def __add__(self, other: str | list[str]) -> list[str]: + def __add__(self, other: "str | list[str] | ResourceSourceList") -> list[str]: """Add a list of sources to the list.""" if isinstance(other, str): + if not other: + other = "(dummy node)" other = [other] new_set = {*self.sources, *other} return sorted(new_set, key=str.casefold) @@ -118,7 +120,9 @@ def __hash__(self) -> int: """Get the hash of the list of sources.""" return hash(self.sources) - def __iadd__(self, other: str | list[str]) -> "ResourceSourceList": + def __iadd__( + self, other: "str | list[str] | ResourceSourceList" + ) -> "ResourceSourceList": """Add a list of sources to the list.""" self.sources = self + other return self @@ -230,7 +234,168 @@ def _flatten_io(io: list[Iterable]) -> list[str]: return cast(list[str], io) -def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: +class MultipleContext(list): + """Subclass of list to store multilpe contexts.""" + + +class DirectlySetResources(ast.NodeVisitor): + """Class to track resources set directly, rather than through NodeBlocks.""" + + def __init__(self) -> None: + """Initialize the visitor.""" + super().__init__() + self._context: dict[str, Any] = {} + self.dynamic_resources: dict[str, ResourceSourceList] = {} + self._history: dict[str, list[Any]] = {} + self.resources: dict[str, ResourceSourceList] = {} + + def assign_resource(self, resource: str, value: str) -> None: + """Assign a value to a resource.""" + target = self.dynamic_resources if r".*" in value else self.resources + resource = str(resource) + if resource not in target: + target[resource] = ResourceSourceList() + target[resource] += value + + @property + def context(self) -> dict[str, Any]: + """Return the context.""" + return self._context + + @context.setter + def context(self, value: tuple[Iterable, Any]) -> None: + """Set the context.""" + key, _value = value + if not isinstance(key, str): + for subkey in key: + self.context = subkey, _value + else: + self._context[key] = _value + if key not in self._history: + self._history[key] = [] + self._history[key].append(_value) + + def lookup_context(self, variable: str) -> str | MultipleContext: + """Plug in variable.""" + + def lookup() -> str | list[str]: + """Look up 
context.""" + if variable in self.context: + if self.context[variable] == variable: + history = list(self._history[variable]) + while history and history[-1] == variable: + history.pop() + if history: + return history[-1] + return self.context[variable] + return ".*" + + context = lookup() + if isinstance(context, list): + context = MultipleContext(context) + return context + + @staticmethod + def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]: + """Parse multiple contexts.""" + if isinstance(contexts, list): + return MultipleContext( + [ + "".join(list(ctx)) + for ctx in product( + *[ + context if isinstance(context, list) else [context] + for context in contexts + ] + ) + ] + ) + return contexts + + def parse_ast(self, node: Any) -> Any: + """Parse AST.""" + if not isinstance(node, ast.AST): + if isinstance(node, str): + return node + if not isinstance(node, Iterable): + return str(node) + if isinstance(node, ast.Dict): + return { + self.parse_ast(key): self.parse_ast(value) + for key, value in dict(zip(node.keys, node.values)).items() + } + if isinstance(node, (MultipleContext, list, set, tuple)): + return type(node)(self.parse_ast(subnode) for subnode in node) + if isinstance(node, ast.FormattedValue): + if hasattr(node, "value") and hasattr(node.value, "id"): + return self.lookup_context(getattr(node.value, "id")) + if isinstance(node, ast.JoinedStr): + node_values = [self.parse_ast(value) for value in node.values] + if any(isinstance(value, MultipleContext) for value in node_values): + return self.handle_multiple_contexts(node_values) + return "".join(str(item) for item in node_values) + if isinstance(node, ast.Dict): + return { + self.parse_ast(key): self.parse_ast(value) + for key, value in dict(zip(node.keys, node.values)).items() + } + for attr in ["values", "elts"]: + if hasattr(node, attr): + return [self.parse_ast(subnode) for subnode in getattr(node, attr)] + for attr in ["value", "id"]: + if hasattr(node, attr): + return self.parse_ast(getattr(node, attr)) + return r".*" # wildcard for regex matching + + def visit_Assign(self, node: ast.Assign) -> None: + """Visit an assignment.""" + value = self.parse_ast(node.value) + for target in node.targets: + resource = self.parse_ast(target) + self.context = resource, value + # self.assign_resource(str(self.parse_ast(target)), value) + self.generic_visit(node) + + def visit_Call(self, node: ast.Call) -> None: + """Visit a function call.""" + if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data": + value = self.parse_ast(node.args[5]) + if hasattr(node.args[0], "value"): + resource: str = getattr(node.args[0], "value") + elif hasattr(node.args[0], "id"): + resource = self.lookup_context(getattr(node.args[0], "id")) + if isinstance(resource, MultipleContext): + for resource_context in resource: + self.assign_resource(resource_context, value) + self.generic_visit(node) + return + elif isinstance(node.args[0], ast.JoinedStr): + resource = self.parse_ast(node.args[0]) + else: + self.generic_visit(node) + return + self.assign_resource(resource, value) + self.generic_visit(node) + + def visit_For(self, node: ast.For) -> None: + """Vist for loop.""" + # This is probably too specific, + # will need to be updated if we add more out-of-nodeblock settings. 
+ target = self.parse_ast(node.target) + if ( + hasattr(node.iter, "func") + and hasattr(node.iter.func, "value") + and hasattr(node.iter.func.value, "id") + ): + context = self.context.get(self.parse_ast(node.iter.func.value.id), ".*") + if isinstance(target, list) and isinstance(context, dict): + self.context = target[0], list(context.keys()) + else: + self.context = target, self.parse_ast(node.iter) + self.generic_visit(node) + + +def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]: """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. Parameters @@ -243,28 +408,31 @@ def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: dict A dictionary containing the name of the resource and the name of the functions that set it. """ - resources: dict[str[list[str]]] = {} + resources: dict[str, ResourceSourceList] = {} + dynamic_resources: dict[str, ResourceSourceList] = {} for dirpath, _, filenames in os.walk(str(files(package_name))): for filename in filenames: if filename.endswith(".py"): filepath = os.path.join(dirpath, filename) with open(filepath, "r", encoding="utf-8") as file: tree = ast.parse(file.read(), filename=filepath) - for node in ast.walk(tree): - if isinstance(node, ast.Call) and isinstance( - node.func, ast.Attribute - ): - if node.func.attr == "set_data": - try: - resource: str = ast.literal_eval(node.args[0]) - if resource not in resources: - resources[resource] = [] - resources[resource].append( - ast.literal_eval(node.args[-1]) - ) - except ValueError: - # The resource name or function name is not a literal, so this `set_data` is a dynamic call - pass + directly_set = DirectlySetResources() + directly_set.visit(tree) + for resource in directly_set.resources: + if resource not in resources: + resources[resource] = ResourceSourceList() + resources[resource] += directly_set.resources[resource] + for resource in directly_set.dynamic_resources: + if resource not in dynamic_resources: + dynamic_resources[resource] = ResourceSourceList() + dynamic_resources[resource] += directly_set.dynamic_resources[ + resource + ] + # for dynamic_key, dynamic_value in dynamic_resources.items(): + # dynamic_resource = re.compile(dynamic_key) + # for resource in resources.keys(): + # if dynamic_resource.search(resource): + # resources[resource] += dynamic_value return resources From 74a715bccfd762e627d9796f639d67272e2719a3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 10:39:38 -0500 Subject: [PATCH 153/507] :necktie: Match keys and values for assignment loops --- CPAC/pipeline/resource_inventory.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index dfce0d1c2d..e3fa668d58 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -353,7 +353,6 @@ def visit_Assign(self, node: ast.Assign) -> None: for target in node.targets: resource = self.parse_ast(target) self.context = resource, value - # self.assign_resource(str(self.parse_ast(target)), value) self.generic_visit(node) def visit_Call(self, node: ast.Call) -> None: @@ -365,8 +364,12 @@ def visit_Call(self, node: ast.Call) -> None: elif hasattr(node.args[0], "id"): resource = self.lookup_context(getattr(node.args[0], "id")) if isinstance(resource, MultipleContext): - for resource_context in resource: - self.assign_resource(resource_context, value) + if len(resource) == len(value): + for k, v in 
zip(resource, value): + self.assign_resource(k, v) + else: + for resource_context in resource: + self.assign_resource(resource_context, value) self.generic_visit(node) return elif isinstance(node.args[0], ast.JoinedStr): From 9730dad954296c1889061395ee0743795580b41e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 12:45:47 -0500 Subject: [PATCH 154/507] :necktie: Include func def args in context --- CPAC/pipeline/resource_inventory.py | 36 +++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index e3fa668d58..1f66cea394 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -251,8 +251,14 @@ def __init__(self) -> None: def assign_resource(self, resource: str, value: str) -> None: """Assign a value to a resource.""" - target = self.dynamic_resources if r".*" in value else self.resources + if isinstance(resource, ast.AST): + resource = self.parse_ast(resource) resource = str(resource) + target = ( + self.dynamic_resources + if r".*" in value or r".*" in resource + else self.resources + ) if resource not in target: target[resource] = ResourceSourceList() target[resource] += value @@ -295,8 +301,8 @@ def lookup() -> str | list[str]: context = MultipleContext(context) return context - @staticmethod - def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]: + # @staticmethod + def handle_multiple_contexts(self, contexts: list[str | list[str]]) -> list[str]: """Parse multiple contexts.""" if isinstance(contexts, list): return MultipleContext( @@ -339,12 +345,18 @@ def parse_ast(self, node: Any) -> Any: self.parse_ast(key): self.parse_ast(value) for key, value in dict(zip(node.keys, node.values)).items() } - for attr in ["values", "elts"]: - if hasattr(node, attr): - return [self.parse_ast(subnode) for subnode in getattr(node, attr)] - for attr in ["value", "id"]: - if hasattr(node, attr): - return self.parse_ast(getattr(node, attr)) + if not isinstance(node, ast.Call): + for attr in ["values", "elts", "args"]: + if hasattr(node, attr): + iterable = getattr(node, attr) + if isinstance(iterable, Iterable): + return [ + self.parse_ast(subnode) for subnode in getattr(node, attr) + ] + return self.parse_ast(iterable) + for attr in ["value", "id", "arg"]: + if hasattr(node, attr): + return self.parse_ast(getattr(node, attr)) return r".*" # wildcard for regex matching def visit_Assign(self, node: ast.Assign) -> None: @@ -397,6 +409,12 @@ def visit_For(self, node: ast.For) -> None: self.context = target, self.parse_ast(node.iter) self.generic_visit(node) + def visit_FunctionDef(self, node: ast.FunctionDef) -> None: + """Visit a function definition.""" + for arg in self.parse_ast(node): + self.context = arg, ".*" + self.generic_visit(node) + def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]: """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. 
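Taken together, patches 152-154 rest `DirectlySetResources` on two cooperating ideas: remember a context (the candidate values) for each loop or assignment target, then expand f-string resource names over every remembered value with `itertools.product`. A self-contained sketch of that core, with a toy `SOURCE` module and a simplified visitor (illustrative only, not C-PAC's actual class):

import ast
from itertools import product

SOURCE = '''
for hemi in ["L", "R"]:
    rpool.set_data(f"hemi-{hemi}_desc-surface_curv", node, out, {}, "", "surf_ingress")
'''

class SetDataFinder(ast.NodeVisitor):
    """Collect resource names passed to ``set_data``, expanding loop variables."""

    def __init__(self) -> None:
        self.context: dict[str, list[str]] = {}
        self.resources: list[str] = []

    def visit_For(self, node: ast.For) -> None:
        # Remember ``for name in [literal, ...]`` loops as candidate contexts.
        if isinstance(node.target, ast.Name) and isinstance(node.iter, ast.List):
            self.context[node.target.id] = [
                str(elt.value) for elt in node.iter.elts if isinstance(elt, ast.Constant)
            ]
        self.generic_visit(node)

    def visit_Call(self, node: ast.Call) -> None:
        if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data":
            arg = node.args[0]
            if isinstance(arg, ast.Constant):
                self.resources.append(str(arg.value))
            elif isinstance(arg, ast.JoinedStr):
                # One candidate list per f-string piece; product() yields every combination.
                parts = [
                    self.context.get(piece.value.id, [".*"])
                    if isinstance(piece, ast.FormattedValue)
                    and isinstance(piece.value, ast.Name)
                    else [str(piece.value)]
                    for piece in arg.values
                ]
                self.resources.extend("".join(combo) for combo in product(*parts))
        self.generic_visit(node)

finder = SetDataFinder()
finder.visit(ast.parse(SOURCE))
print(finder.resources)  # ['hemi-L_desc-surface_curv', 'hemi-R_desc-surface_curv']

Running the sketch prints both expanded names, which is the behavior the series relies on to catch resources whose names are assembled dynamically rather than passed as string literals.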
From 252098b776b17e775c2a5f7207774bad4e15c2bf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 12:49:10 -0500 Subject: [PATCH 155/507] :children_crossing: Exclude dummy node from inventory --- CPAC/pipeline/resource_inventory.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 1f66cea394..be7bdafa5f 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -94,8 +94,9 @@ class ResourceSourceList: def __add__(self, other: "str | list[str] | ResourceSourceList") -> list[str]: """Add a list of sources to the list.""" if isinstance(other, str): - if not other: - other = "(dummy node)" + if not other or other == "created_before_this_test": + # dummy node in a testing function, no need to include in inventory + return list(self) other = [other] new_set = {*self.sources, *other} return sorted(new_set, key=str.casefold) From b3521fc38509b7c5f080e92ff11496185c3514ca Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 17 Jan 2025 14:31:21 -0500 Subject: [PATCH 156/507] :necktie: Handle some special cases --- CPAC/pipeline/engine.py | 27 ++-- CPAC/pipeline/resource_inventory.py | 212 ++++++++++++++++++++++------ 2 files changed, 179 insertions(+), 60 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 7494ae92ee..91066d820f 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1009,6 +1009,19 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) for label_con_tpl in post_labels: label = label_con_tpl[0] connection = (label_con_tpl[1], label_con_tpl[2]) + if "desc-" not in label: + if "space-template" in label: + new_label = label.replace( + "space-template", "space-template_desc-zstd" + ) + else: + new_label = f"desc-zstd_{label}" + else: + for tag in label.split("_"): + if "desc-" in tag: + newtag = f"{tag}-zstd" + new_label = label.replace(tag, newtag) + break if label in Outputs.to_zstd: zstd = z_score_standardize(f"{label}_zstd_{pipe_x}", input_type) @@ -1017,20 +1030,6 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) node, out = self.get_data(mask, pipe_idx=mask_idx) wf.connect(node, out, zstd, "inputspec.mask") - if "desc-" not in label: - if "space-template" in label: - new_label = label.replace( - "space-template", "space-template_desc-zstd" - ) - else: - new_label = f"desc-zstd_{label}" - else: - for tag in label.split("_"): - if "desc-" in tag: - newtag = f"{tag}-zstd" - new_label = label.replace(tag, newtag) - break - post_labels.append((new_label, zstd, "outputspec.out_file")) self.set_data( diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index be7bdafa5f..36038913f4 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -19,6 +19,7 @@ from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace import ast +from collections.abc import Hashable from dataclasses import dataclass, field import importlib from importlib.resources import files @@ -26,6 +27,7 @@ from itertools import chain, product import os from pathlib import Path +import re from typing import Any, cast, Iterable, Optional from unittest.mock import patch @@ -34,9 +36,30 @@ from CPAC.pipeline.engine import template_dataframe from CPAC.pipeline.nodeblock import NodeBlockFunction +from CPAC.pipeline.schema import latest_schema from CPAC.utils.monitoring import UTLOGGER from 
CPAC.utils.outputs import Outputs +ONE_OFFS: dict[str, list[str]] = { + r".*desc-preproc_bold": ["func_ingress"], + r".*-sm.*": [ + f"spatial_smoothing_{smooth_opt}" + for smooth_opt in latest_schema.schema["post_processing"]["spatial_smoothing"][ + "smoothing_method" + ][0].container + ], + r".*-zstd.*": [f"{fisher}zscore_standardize" for fisher in ["", "fisher_"]], +} +"""A few out-of-nodeblock generated resources. + +Easier to note these manually than to code up the AST rules.""" + +SKIPS: list[str] = [ + "CPAC.unet.__init__", + "CPAC.unet._torch", +] +"""No nodeblock functions in these modules that dynamically install `torch`.""" + def import_nodeblock_functions( package_name: str, exclude: Optional[list[str]] = None @@ -238,6 +261,47 @@ def _flatten_io(io: list[Iterable]) -> list[str]: class MultipleContext(list): """Subclass of list to store multilpe contexts.""" + def __init__(self, /, *args, **kwargs) -> None: + """Initialize MultipleContext.""" + super().__init__(*args, **kwargs) + data = self._unique(self) + self.clear() + self.extend(data) + + def __hash__(self) -> int: + """Hash a MultipleContext instance.""" + return hash(str(self)) + + def __str__(self) -> str: + """Return a stringified MultipleContext instance.""" + if len(self) == 1: + return str(self[0]) + return super().__str__() + + def append(self, item: Any) -> None: + """Append if not already included.""" + if item not in self: + super().append(item) + + def extend(self, iterable: Iterable) -> None: + """Extend MultipleContext.""" + for item in iterable: + self.append(item) + + @staticmethod + def _unique(iterable: Iterable) -> list: + """Dedupe.""" + try: + seen = set() + return [x for x in iterable if not (x in seen or seen.add(x))] + except TypeError: + seen = set() + return [ + x + for x in (MultipleContext(item) for item in iterable) + if not (x in seen or seen.add(x)) + ] + class DirectlySetResources(ast.NodeVisitor): """Class to track resources set directly, rather than through NodeBlocks.""" @@ -246,15 +310,22 @@ def __init__(self) -> None: """Initialize the visitor.""" super().__init__() self._context: dict[str, Any] = {} - self.dynamic_resources: dict[str, ResourceSourceList] = {} + self.dynamic_resources: dict[str, ResourceSourceList] = { + resource: ResourceSourceList(sources) + for resource, sources in ONE_OFFS.items() + } self._history: dict[str, list[Any]] = {} self.resources: dict[str, ResourceSourceList] = {} - def assign_resource(self, resource: str, value: str) -> None: + def assign_resource(self, resource: str, value: str | MultipleContext) -> None: """Assign a value to a resource.""" if isinstance(resource, ast.AST): resource = self.parse_ast(resource) resource = str(resource) + if isinstance(value, MultipleContext): + for subvalue in value: + self.assign_resource(resource, subvalue) + return target = ( self.dynamic_resources if r".*" in value or r".*" in resource @@ -279,31 +350,36 @@ def context(self, value: tuple[Iterable, Any]) -> None: else: self._context[key] = _value if key not in self._history: - self._history[key] = [] + self._history[key] = [".*"] self._history[key].append(_value) - def lookup_context(self, variable: str) -> str | MultipleContext: + def lookup_context( + self, variable: str, return_type: Optional[type] = None + ) -> str | MultipleContext: """Plug in variable.""" - - def lookup() -> str | list[str]: - """Look up context.""" - if variable in self.context: - if self.context[variable] == variable: - history = list(self._history[variable]) - while history and history[-1] == 
variable: - history.pop() - if history: - return history[-1] - return self.context[variable] - return ".*" - - context = lookup() - if isinstance(context, list): - context = MultipleContext(context) - return context - - # @staticmethod - def handle_multiple_contexts(self, contexts: list[str | list[str]]) -> list[str]: + if variable in self.context: + if self.context[variable] == variable or ( + return_type and not isinstance(self.context[variable], return_type) + ): + history = list(self._history[variable]) + while history and history[-1] == variable: + history.pop() + if history: + context = history[-1] + while ( + return_type + and len(history) + and not isinstance(context, return_type) + ): + context = history.pop() + if return_type and not isinstance(context, return_type): + return ".*" + return context + return self.context[variable] + return ".*" + + @staticmethod + def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]: """Parse multiple contexts.""" if isinstance(contexts, list): return MultipleContext( @@ -322,9 +398,7 @@ def handle_multiple_contexts(self, contexts: list[str | list[str]]) -> list[str] def parse_ast(self, node: Any) -> Any: """Parse AST.""" if not isinstance(node, ast.AST): - if isinstance(node, str): - return node - if not isinstance(node, Iterable): + if isinstance(node, str) or not isinstance(node, Iterable): return str(node) if isinstance(node, ast.Dict): return { @@ -343,7 +417,9 @@ def parse_ast(self, node: Any) -> Any: return "".join(str(item) for item in node_values) if isinstance(node, ast.Dict): return { - self.parse_ast(key): self.parse_ast(value) + self.parse_ast(key) + if isinstance(self.parse_ast(key), Hashable) + else ".*": self.parse_ast(value) for key, value in dict(zip(node.keys, node.values)).items() } if not isinstance(node, ast.Call): @@ -358,11 +434,22 @@ def parse_ast(self, node: Any) -> Any: for attr in ["value", "id", "arg"]: if hasattr(node, attr): return self.parse_ast(getattr(node, attr)) + elif ( + hasattr(node, "func") + and getattr(node.func, "attr", None) in ["items", "keys", "values"] + and getattr(getattr(node.func, "value", None), "id", None) in self.context + ): + context = self.lookup_context(node.func.value.id, return_type=dict) + if isinstance(context, dict): + return MultipleContext(getattr(context, node.func.attr)()) return r".*" # wildcard for regex matching def visit_Assign(self, node: ast.Assign) -> None: """Visit an assignment.""" value = self.parse_ast(node.value) + if value == "row" and getattr(node.value, "attr", None): + # hack for template dataframe + value = MultipleContext(getattr(template_dataframe(), node.value.attr)) for target in node.targets: resource = self.parse_ast(target) self.context = resource, value @@ -372,6 +459,9 @@ def visit_Call(self, node: ast.Call) -> None: """Visit a function call.""" if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data": value = self.parse_ast(node.args[5]) + if isinstance(node.args[5], ast.Name): + if isinstance(value, str): + value = self.lookup_context(value) if hasattr(node.args[0], "value"): resource: str = getattr(node.args[0], "value") elif hasattr(node.args[0], "id"): @@ -395,29 +485,52 @@ def visit_Call(self, node: ast.Call) -> None: def visit_For(self, node: ast.For) -> None: """Vist for loop.""" - # This is probably too specific, - # will need to be updated if we add more out-of-nodeblock settings. 
target = self.parse_ast(node.target) if ( hasattr(node.iter, "func") and hasattr(node.iter.func, "value") and hasattr(node.iter.func.value, "id") ): - context = self.context.get(self.parse_ast(node.iter.func.value.id), ".*") - if isinstance(target, list) and isinstance(context, dict): - self.context = target[0], list(context.keys()) + context = self.parse_ast(node.iter) + if not context: + context = r".*" + if isinstance(target, list): + target_len = len(target) + if isinstance(context, dict): + self.context = target[0], MultipleContext(context.keys()) + if isinstance(context, list) and all( + (isinstance(item, tuple) and len(item) == target_len) + for item in context + ): + for index, item in enumerate(target): + self.context = ( + item, + MultipleContext( + subcontext[index] for subcontext in context + ), + ) + elif hasattr(node.iter, "value") and ( + getattr(node.iter.value, "id", None) == "self" + or getattr(node.iter, "attr", False) + ): + self.context = target, ".*" else: self.context = target, self.parse_ast(node.iter) self.generic_visit(node) def visit_FunctionDef(self, node: ast.FunctionDef) -> None: """Visit a function definition.""" + if node.name == "set_data": + # skip the method definition + return for arg in self.parse_ast(node): self.context = arg, ".*" self.generic_visit(node) -def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]: +def find_directly_set_resources( + package_name: str, +) -> tuple[dict[str, ResourceSourceList], dict[str, ResourceSourceList]]: """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. Parameters @@ -429,6 +542,9 @@ def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceLi ------- dict A dictionary containing the name of the resource and the name of the functions that set it. 
+ + dict + A dictionary containing regex strings for special cases """ resources: dict[str, ResourceSourceList] = {} dynamic_resources: dict[str, ResourceSourceList] = {} @@ -450,12 +566,7 @@ def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceLi dynamic_resources[resource] += directly_set.dynamic_resources[ resource ] - # for dynamic_key, dynamic_value in dynamic_resources.items(): - # dynamic_resource = re.compile(dynamic_key) - # for resource in resources.keys(): - # if dynamic_resource.search(resource): - # resources[resource] += dynamic_value - return resources + return resources, dynamic_resources def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: @@ -464,11 +575,7 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: # Node block function inputs and outputs for nbf in import_nodeblock_functions( package, - [ - # No nodeblock functions in these modules that dynamically isntall torch - "CPAC.unet.__init__", - "CPAC.unet._torch", - ], + exclude=SKIPS, ): nbf_name = f"{nbf.__module__}.{nbf.__qualname__}" if hasattr(nbf, "inputs"): @@ -498,7 +605,8 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: else: resources[row.Key].output_from += output_from # Hard-coded resources - for resource, functions in find_directly_set_resources(package).items(): + direct, dynamic = find_directly_set_resources(package) + for resource, functions in direct.items(): if resource not in resources: resources[resource] = ResourceIO(resource, output_from=functions) else: @@ -511,6 +619,18 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: ) else: resources[row.Resource].output_to += row["Sub-Directory"] + # Special cases + for dynamic_key, dynamic_value in dynamic.items(): + if dynamic_key != r".*": + dynamic_resource = re.compile(dynamic_key) + for resource in resources.keys(): + if dynamic_resource.search(resource): + resources[resource].output_from += dynamic_value + if "interface" in resources: + # this is a loop in setting up nodeblocks + # https://github.com/FCP-INDI/C-PAC/blob/61ad414447023daf0e401a81c92267b09c64ed94/CPAC/pipeline/engine.py#L1453-L1464 + # it's already handled in the NodeBlock resources + del resources["interface"] return dict(sorted(resources.items(), key=lambda item: item[0].casefold())) From 352516dd14a320afad5cd1262479b70436a4c8a3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 9 Jan 2025 11:38:11 -0500 Subject: [PATCH 157/507] :children_crossing: Include resource soure information in resource-not-found errors --- CHANGELOG.md | 1 + CPAC/pipeline/engine.py | 14 ++++++++++---- CPAC/pipeline/resource_inventory.py | 22 ++++++++++++++++++++-- 3 files changed, 31 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9de89dc58e..b67477ffde 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). +- Resource-not-found errors now include information about where to source those resources. 
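The sourcing information behind this entry comes from the inventory pass added in the previous patch, where regex-valued dynamic keys such as the `ONE_OFFS` patterns are matched against concrete resource names before anything is reported. A minimal sketch of that matching step, using a toy inventory and plain lists in place of `ResourceSourceList`:

import re

resources = {
    "space-template_desc-zstd_alff": [],
    "desc-preproc_bold": ["func_ingress"],
}
dynamic = {r".*-zstd.*": ["zscore_standardize", "fisher_zscore_standardize"]}

# Attach each dynamic source list to every concrete resource its pattern matches.
for pattern, sources in dynamic.items():
    if pattern != r".*":  # a bare wildcard would match everything, so skip it
        compiled = re.compile(pattern)
        for name, known_sources in resources.items():
            if compiled.search(name):
                known_sources.extend(sources)

print(resources["space-template_desc-zstd_alff"])
# ['zscore_standardize', 'fisher_zscore_standardize']

Only the z-scored resource picks up the z-standardization sources; the plain preproc resource keeps its single ingress source, so the error message can list exactly the node blocks or helpers capable of producing a missing resource.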
### Fixed diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 91066d820f..878b743bfe 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -420,10 +420,12 @@ def get( if report_fetched: return (None, None) return None + from CPAC.pipeline.resource_inventory import where_to_find + msg = ( "\n\n[!] C-PAC says: None of the listed resources are in " - f"the resource pool:\n\n {resource}\n\nOptions:\n- You " - "can enable a node block earlier in the pipeline which " + f"the resource pool:\n\n {where_to_find(resource)}\n\nOptions:\n" + "- You can enable a node block earlier in the pipeline which " "produces these resources. Check the 'outputs:' field in " "a node block's documentation.\n- You can directly " "provide this required data by pulling it from another " @@ -458,7 +460,9 @@ def copy_resource(self, resource, new_name): try: self.rpool[new_name] = self.rpool[resource] except KeyError: - msg = f"[!] {resource} not in the resource pool." + from CPAC.pipeline.resource_inventory import where_to_find + + msg = f"[!] Not in the resource pool:\n{where_to_find(resource)}" raise Exception(msg) def update_resource(self, resource, new_name): @@ -630,11 +634,13 @@ def get_strats(self, resources, debug=False): total_pool.append(sub_pool) if not total_pool: + from CPAC.pipeline.resource_inventory import where_to_find + raise LookupError( "\n\n[!] C-PAC says: None of the listed " "resources in the node block being connected " "exist in the resource pool.\n\nResources:\n" - "%s\n\n" % resource_list + "%s\n\n" % where_to_find(resource_list) ) # TODO: right now total_pool is: diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 36038913f4..45396dfe06 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -242,7 +242,7 @@ def cli_parser() -> Namespace: return parser.parse_args() -def _flatten_io(io: list[Iterable]) -> list[str]: +def _flatten_io(io: Iterable[Iterable]) -> list[str]: """Given a list of strings or iterables thereof, flatten the list to all strings.""" if all(isinstance(resource, str) for resource in io): return cast(list[str], io) @@ -577,7 +577,7 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: package, exclude=SKIPS, ): - nbf_name = f"{nbf.__module__}.{nbf.__qualname__}" + nbf_name = f"{nbf.name} ({nbf.__module__}.{nbf.__qualname__})" if hasattr(nbf, "inputs"): for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)): if nbf_input: @@ -641,6 +641,24 @@ def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str: ) +def where_to_find(resources: list[str] | str) -> str: + """Return a multiline string describing where each listed resource is output from.""" + if isinstance(resources, str): + resources = [resources] + resources = _flatten_io(resources) + inventory = resource_inventory("CPAC") + output = "" + for resource in resources: + output += f"'{resource}' is output from:\n" + if resource in inventory: + for source in inventory[resource].output_from: + output += f" {source}\n" + else: + output += " !! 
Nowhere !!\n" + output += "\n" + return output.rstrip() + + def main() -> None: """Save the NodeBlock inventory to a file.""" args = cli_parser() From 77caa2a7cef0c85defe7e01644569f4728e1a469 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 10 Jan 2025 15:53:04 -0500 Subject: [PATCH 158/507] :recycle: Move bids_examples into a reusable pyest fixture --- CPAC/conftest.py | 34 ++++++++++++++++++++++++ dev/circleci_data/conftest.py | 19 +++++++++++++ dev/circleci_data/test_external_utils.py | 13 ++------- 3 files changed, 55 insertions(+), 11 deletions(-) create mode 100644 CPAC/conftest.py create mode 100644 dev/circleci_data/conftest.py diff --git a/CPAC/conftest.py b/CPAC/conftest.py new file mode 100644 index 0000000000..ea2be416a5 --- /dev/null +++ b/CPAC/conftest.py @@ -0,0 +1,34 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Global fixtures for C-PAC tests.""" + +from pathlib import Path + +from _pytest.tmpdir import TempPathFactory +from git import Repo +import pytest + + +@pytest.fixture(scope="session") +def bids_examples(tmp_path_factory: TempPathFactory) -> Path: + """Get the BIDS examples dataset.""" + example_dir = tmp_path_factory.mktemp("bids-examples") + if not example_dir.exists(): + Repo.clone_from( + "https://github.com/bids-standard/bids-examples.git", str(example_dir) + ) + return example_dir diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py new file mode 100644 index 0000000000..ba239b2b4f --- /dev/null +++ b/dev/circleci_data/conftest.py @@ -0,0 +1,19 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Global fixtures for C-PAC tests.""" + +from CPAC.conftest import * # noqa: F403 diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py index f516b0c903..31f6b243da 100644 --- a/dev/circleci_data/test_external_utils.py +++ b/dev/circleci_data/test_external_utils.py @@ -31,8 +31,6 @@ from CPAC.__main__ import utils as CPAC_main_utils # noqa: E402 -# pylint: disable=wrong-import-position - def _click_backport(command, key): """Switch back to underscores for older versions of click.""" @@ -93,18 +91,11 @@ def test_build_data_config(caplog, cli_runner, multiword_connector): _delete_test_yaml(test_yaml) -def test_new_settings_template(caplog, cli_runner): +def test_new_settings_template(bids_examples: Path, caplog, cli_runner): """Test CLI ``utils new-settings-template``.""" caplog.set_level(INFO) os.chdir(CPAC_DIR) - - example_dir = os.path.join(CPAC_DIR, "bids-examples") - if not os.path.exists(example_dir): - from git import Repo - - Repo.clone_from( - "https://github.com/bids-standard/bids-examples.git", example_dir - ) + assert bids_examples.exists() result = cli_runner.invoke( CPAC_main_utils.commands[ From 5d1112551fd0f7103bf22e32ab6e4ea6ea0c34c7 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 10 Jan 2025 17:12:29 -0500 Subject: [PATCH 159/507] :white_check_mark: Add test for resource inventory error message --- CPAC/_entrypoints/run.py | 2 +- CPAC/conftest.py | 2 +- CPAC/pipeline/resource_inventory.py | 2 +- CPAC/pipeline/test/test_engine.py | 140 +++++++++++++++++++++++++++- 4 files changed, 140 insertions(+), 6 deletions(-) diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index 98a30ba094..f84b6cf799 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -795,7 +795,7 @@ def run_main(): args.data_config_file, args.participant_label, args.aws_input_creds ) sub_list = sub_list_filter_by_labels( - sub_list, {"T1w": args.T1w_label, "bold": args.bold_label} + list(sub_list), {"T1w": args.T1w_label, "bold": args.bold_label} ) # C-PAC only handles single anatomical images (for now) diff --git a/CPAC/conftest.py b/CPAC/conftest.py index ea2be416a5..7b765736ee 100644 --- a/CPAC/conftest.py +++ b/CPAC/conftest.py @@ -27,7 +27,7 @@ def bids_examples(tmp_path_factory: TempPathFactory) -> Path: """Get the BIDS examples dataset.""" example_dir = tmp_path_factory.mktemp("bids-examples") - if not example_dir.exists(): + if not example_dir.exists() or not any(example_dir.iterdir()): Repo.clone_from( "https://github.com/bids-standard/bids-examples.git", str(example_dir) ) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 45396dfe06..01c28ae74e 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -649,7 +649,7 @@ def where_to_find(resources: list[str] | str) -> str: inventory = resource_inventory("CPAC") output = "" for resource in resources: - output += f"'{resource}' is output from:\n" + output += f"'{resource}' can be output from:\n" if resource in inventory: for source in inventory[resource].output_from: output += f" {source}\n" diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index cf85f50dbe..25b16d9e44 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -1,5 +1,27 @@ +# Copyright (C) 2021-2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Unit tests for the C-PAC pipeline engine.""" + +from argparse import Namespace import os +from pathlib import Path +from typing import cast +from _pytest.logging import LogCaptureFixture import pytest from CPAC.pipeline.cpac_pipeline import ( @@ -138,17 +160,129 @@ def test_build_workflow(pipe_config, bids_dir, test_dir): wf.run() +def test_missing_resource( + bids_examples: Path, caplog: LogCaptureFixture, tmp_path: Path +) -> None: + """Test the error message thrown when a resource is missing.""" + from datetime import datetime + + import yaml + + from CPAC.pipeline.cpac_runner import run + from CPAC.utils.bids_utils import sub_list_filter_by_labels + from CPAC.utils.configuration import Preconfiguration, set_subject + from CPAC.utils.configuration.yaml_template import create_yaml_from_template + + st = datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ") + namespace = Namespace( + bids_dir=str(bids_examples / "ds113b"), + output_dir=str(tmp_path / "output"), + analysis_level="test_config", + participant_label="sub-01", + ) + c = Preconfiguration("anat-only") + c["pipeline_setup", "output_directory", "path"] = namespace.output_dir + c["pipeline_setup", "log_directory", "path"] = str(tmp_path / "logs") + c["pipeline_setup", "working_directory", "path"] = str(tmp_path / "work") + c["pipeline_setup", "system_config", "maximum_memory_per_participant"] = 1.0 + c["pipeline_setup", "system_config", "max_cores_per_participant"] = 1 + c["pipeline_setup", "system_config", "num_participants_at_once"] = 1 + c["pipeline_setup", "system_config", "num_ants_threads"] = 1 + c["pipeline_setup", "working_directory", "remove_working_dir"] = True + sub_list = create_cpac_data_config( + namespace.bids_dir, + namespace.participant_label, + None, + True, + only_one_anat=False, + ) + sub_list = sub_list_filter_by_labels(list(sub_list), {"T1w": None, "bold": None}) + for i, sub in enumerate(sub_list): + if isinstance(sub.get("anat"), dict): + for anat_key in sub["anat"]: + if isinstance(sub["anat"][anat_key], list) and len( + sub["anat"][anat_key] + ): + sub_list[i]["anat"][anat_key] = sub["anat"][anat_key][0] + if isinstance(sub.get("anat"), list) and len(sub["anat"]): + sub_list[i]["anat"] = sub["anat"][0] + data_config_file = f"cpac_data_config_{st}.yml" + sublogdirs = [set_subject(sub, c)[2] for sub in sub_list] + # write out the data configuration file + data_config_file = os.path.join(sublogdirs[0], data_config_file) + with open(data_config_file, "w", encoding="utf-8") as _f: + noalias_dumper = yaml.dumper.SafeDumper + noalias_dumper.ignore_aliases = lambda self, data: True + yaml.dump(sub_list, _f, default_flow_style=False, Dumper=noalias_dumper) + + # update and write out pipeline config file + pipeline_config_file = os.path.join(sublogdirs[0], f"cpac_pipeline_config_{st}.yml") + with open(pipeline_config_file, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c)) + minimized_config = 
f"{pipeline_config_file[:-4]}_min.yml" + with open(minimized_config, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c, import_from="blank")) + for config_file in (data_config_file, pipeline_config_file, minimized_config): + os.chmod(config_file, 0o444) # Make config files readonly + + if len(sublogdirs) > 1: + # If more than one run is included in the given data config + # file, an identical copy of the data and pipeline config + # will be included in the log directory for each run + for sublogdir in sublogdirs[1:]: + for config_file in ( + data_config_file, + pipeline_config_file, + minimized_config, + ): + try: + os.link(config_file, config_file.replace(sublogdirs[0], sublogdir)) + except FileExistsError: + pass + + run( + data_config_file, + pipeline_config_file, + plugin="Linear", + plugin_args={ + "n_procs": int( + cast( + int | str, + c["pipeline_setup", "system_config", "max_cores_per_participant"], + ) + ), + "memory_gb": int( + cast( + int | str, + c[ + "pipeline_setup", + "system_config", + "maximum_memory_per_participant", + ], + ) + ), + "raise_insufficient": c[ + "pipeline_setup", "system_config", "raise_insufficient" + ], + }, + tracking=False, + test_config=namespace.analysis_level == "test_config", + ) + + assert "can be output from" in caplog.text + + # bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata" # test_dir = "/test_dir" # cfg = "/Users/hecheng.jin/GitHub/DevBranch/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml" -cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" -bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" -test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" # test_ingress_func_raw_data(cfg, bids_dir, test_dir) # test_ingress_anat_raw_data(cfg, bids_dir, test_dir) # test_ingress_pipeconfig_data(cfg, bids_dir, test_dir) # test_build_anat_preproc_stack(cfg, bids_dir, test_dir) if __name__ == "__main__": + cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" + bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" + test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" test_build_workflow(cfg, bids_dir, test_dir) From 8b369f3105390773277b202fc54ad2831a4ecbff Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 13 Jan 2025 14:19:12 -0500 Subject: [PATCH 160/507] :construction_worker: Install `openssh-client` for bids-examples fixture --- .circleci/main.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index a13300a78d..f936f9230d 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -48,7 +48,7 @@ commands: sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13 curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - sudo apt-get update - sudo apt-get install git -y + sudo apt-get install git openssh-client -y git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (machine user) @ CircleCI" create-docker-test-container: @@ -64,11 +64,6 @@ commands: mkdir -p ~/project/test-results docker pull ${DOCKER_TAG} docker run -v /etc/passwd:/etc/passwd --user=$(id -u):c-pac -dit -P -e COVERAGE_FILE=<< parameters.coverage-file >> -v /home/circleci/project/test-results:/code/test-results -v /home/circleci:/home/circleci -v /home/circleci/project/CPAC/resources/configs/test_configs:/test_configs -v $PWD:/code -v $PWD/dev/circleci_data:$PWD/dev/circleci_data --workdir=/home/circleci/project 
--entrypoint=/bin/bash --name docker_test ${DOCKER_TAG} - get-sample-bids-data: - steps: - - run: - name: Getting Sample BIDS Data - command: git clone https://github.com/bids-standard/bids-examples.git get-singularity: parameters: version: @@ -231,7 +226,6 @@ jobs: - set-up-variant: variant: "<< parameters.variant >>" - set-python-version - - get-sample-bids-data - run-pytest-docker - store_test_results: path: test-results From c6b3a18f66138159ca5aab9f6cd9716a3d9ec067 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 20 Jan 2025 10:14:54 -0500 Subject: [PATCH 161/507] :children_crossing: Increase indent before missing resource sources --- CPAC/pipeline/resource_inventory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 01c28ae74e..a181ea6567 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -652,9 +652,9 @@ def where_to_find(resources: list[str] | str) -> str: output += f"'{resource}' can be output from:\n" if resource in inventory: for source in inventory[resource].output_from: - output += f" {source}\n" + output += f" {source}\n" else: - output += " !! Nowhere !!\n" + output += " !! Nowhere !!\n" output += "\n" return output.rstrip() From 1a6c9dccc12413004b77d732dbd4ee8ec6c3d4ca Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 28 Jan 2025 13:24:05 -0500 Subject: [PATCH 162/507] =?UTF-8?q?=F0=9F=9A=9A=20ref=5Fmask=5Fres-2=20and?= =?UTF-8?q?=20T1w=5Ftemplate=5Fres-2=20fields=20are=20moved=20out=20of=20r?= =?UTF-8?q?egistration=20and=20into=20surface.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 1 + CPAC/pipeline/schema.py | 4 ++-- .../configs/pipeline_config_abcd-options.yml | 13 ++++++------- .../resources/configs/pipeline_config_abcd-prep.yml | 13 ++++++------- CPAC/resources/configs/pipeline_config_blank.yml | 12 ++++++------ CPAC/resources/cpac_templates.csv | 4 ++-- 6 files changed, 23 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..b0d70433b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). +- Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`. 
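For existing pipeline configs, this means the two fields move out of `registration_workflows, anatomical_registration, registration, FSL-FNIRT` and into `surface_analysis`. A minimal sketch of the updated block, with the ABCD preconfig values that appear in the diffs below:

surface_analysis:
  abcd_prefreesurfer_prep:
    run: On

    # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
    ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz

    # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
    T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz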
### Fixed diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 997c6267b8..cdb72747d0 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -712,8 +712,6 @@ def sanitize(filename): "interpolation": In({"trilinear", "sinc", "spline"}), "identity_matrix": Maybe(str), "ref_mask": Maybe(str), - "ref_mask_res-2": Maybe(str), - "T1w_template_res-2": Maybe(str), }, }, "overwrite_transform": { @@ -816,6 +814,8 @@ def sanitize(filename): "surface_analysis": { "abcd_prefreesurfer_prep": { "run": bool1_1, + "ref_mask_res-2": Maybe(str), + "T1w_template_res-2": Maybe(str), }, "freesurfer": { "run_reconall": bool1_1, diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 1cb360cdc9..be0025d113 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -31,6 +31,12 @@ surface_analysis: # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: On + + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz + + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz # Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives. # If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline, @@ -102,13 +108,6 @@ registration_workflows: anatomical_registration: run: On registration: - FSL-FNIRT: - - # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz - - # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz # option parameters ANTs: diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index d6542ea358..b1f696cbdd 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -31,6 +31,12 @@ surface_analysis: # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: On + + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz + + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz anatomical_preproc: run: On @@ -72,13 +78,6 @@ anatomical_preproc: registration_workflows: anatomical_registration: registration: - FSL-FNIRT: - - # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz - - # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. 
- T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz # option parameters ANTs: diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 454d8add59..5b7f3f5188 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -209,6 +209,12 @@ surface_analysis: abcd_prefreesurfer_prep: run: Off + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz + + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz + # Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives. # If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline, # select those 'Freesurfer-' labeled options further below in anatomical_preproc. @@ -580,12 +586,6 @@ registration_workflows: # It is for monkey pipeline specifically. FNIRT_T1w_template: - # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz - - # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz - # Configuration file to be used by FSL to set FNIRT parameters. # It is not necessary to change this path unless you intend to use custom FNIRT parameters or a non-standard template. fnirt_config: T1_2_MNI152_2mm diff --git a/CPAC/resources/cpac_templates.csv b/CPAC/resources/cpac_templates.csv index 5c2abc9947..cf4cad758f 100644 --- a/CPAC/resources/cpac_templates.csv +++ b/CPAC/resources/cpac_templates.csv @@ -31,8 +31,8 @@ T1w-template-symmetric,"voxel_mirrored_homotopic_connectivity, symmetric_registr T1w-template-symmetric-deriv,"voxel_mirrored_homotopic_connectivity, symmetric_registration, T1w_template_symmetric_funcreg","Symmetric version of the T1w-based whole-head template, resampled to the desired functional derivative resolution","registration_workflows, functional_registration, func_registration_to_template, output_resolution, func_derivative_outputs" T1w-template-symmetric-for-resample,"voxel_mirrored_homotopic_connectivity, symmetric_registration, T1w_template_symmetric_for_resample",, template-ref-mask,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, ref_mask",,"registration_workflows, anatomical_registration, resolution_for_anat" -template-ref-mask-res-2,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, ref_mask_res-2",, -T1w-template-res-2,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, T1w_template_res-2",, +template-ref-mask-res-2,"surface_analysis, abcd_prefreesurfer_prep, ref_mask_res-2",, +T1w-template-res-2,"surface_analysis, abcd_prefreesurfer_prep, T1w_template_res-2",, template-specification-file,"network_centrality, template_specification_file",Binary ROI mask for network centrality calculations, unet-model,"anatomical_preproc, brain_extraction, UNet, unet_model",, WM-path,"segmentation, tissue_segmentation, FSL-FAST, use_priors, WM_path",Template-space WM tissue prior, From 75612ae5893011e3dbf89802ccc334de4c48ba24 Mon Sep 17 00:00:00 2001 From: 
"birajstha:construction_worker::penguin" Date: Tue, 28 Jan 2025 14:45:56 -0500 Subject: [PATCH 163/507] =?UTF-8?q?=F0=9F=9A=9A=20moving=20ref=5Fmask=5Fre?= =?UTF-8?q?s-2=20and=20T1w=5Ftemplate=5Fres-2=20from=20default=20and=20fmr?= =?UTF-8?q?iprep-options=20config=20too?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/resources/configs/pipeline_config_default.yml | 12 ++++++------ .../configs/pipeline_config_fmriprep-options.yml | 6 ------ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index b7aa56c13f..3d067fbbcf 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -197,6 +197,12 @@ surface_analysis: abcd_prefreesurfer_prep: run: Off + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz + + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. + T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz + # Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives. # If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline, # select those 'Freesurfer-' labeled options further below in anatomical_preproc. @@ -738,12 +744,6 @@ registration_workflows: # It is not necessary to change this path unless you intend to use a different template. identity_matrix: $FSLDIR/etc/flirtsch/ident.mat - # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz - - # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz - overwrite_transform: run: Off diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 555b52302d..f97bcf3180 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -151,12 +151,6 @@ registration_workflows: registration: FSL-FNIRT: - # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - ref_mask_res-2: - - # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. - T1w_template_res-2: - # Reference mask for FSL registration. 
ref_mask: From 6d65824a3102fa4cc367bc9828877fcb7ad67921 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 28 Jan 2025 14:58:55 -0500 Subject: [PATCH 164/507] =?UTF-8?q?=F0=9F=A6=BA=20pushing=20with=20pre-com?= =?UTF-8?q?mit?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ++-- CPAC/resources/configs/pipeline_config_abcd-prep.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index be0025d113..937ab7a636 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -31,10 +31,10 @@ surface_analysis: # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: On - + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz - + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index b1f696cbdd..32ba79beac 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -31,10 +31,10 @@ surface_analysis: # Run freesurfer_abcd_preproc to obtain preprocessed T1w for reconall abcd_prefreesurfer_prep: run: On - + # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz - + # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline. 
T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz From eb0400d5187f0a33e9d686cb64bcb084e7940836 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 29 Jan 2025 19:04:22 -0500 Subject: [PATCH 165/507] =?UTF-8?q?=F0=9F=94=A8=20adding=20f=20in=20fstrin?= =?UTF-8?q?g?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/connectome/connectivity_matrix.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/connectome/connectivity_matrix.py b/CPAC/connectome/connectivity_matrix.py index c0be9f3f27..38c0411e1b 100644 --- a/CPAC/connectome/connectivity_matrix.py +++ b/CPAC/connectome/connectivity_matrix.py @@ -171,7 +171,7 @@ def create_connectome_afni(name, method, pipe_num): imports=["import subprocess"], function=strip_afni_output_header, ), - name="netcorrStripHeader{method}_{pipe_num}", + name=f"netcorrStripHeader{method}_{pipe_num}", ) name_output_node = pe.Node( From b7680581f48c83060c3d58c54904c7e8df3bc50b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 30 Jan 2025 18:17:28 -0500 Subject: [PATCH 166/507] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Generalizing=20des?= =?UTF-8?q?c-ref=5Fbold=20to=20desc-unifize=5Fbold=20and=20fMRIprep=20styl?= =?UTF-8?q?e=20bold=20masking?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/func_preproc/func_preproc.py | 367 +++++++++++++++--------------- CPAC/registration/registration.py | 15 +- 2 files changed, 198 insertions(+), 184 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 3bac53cc87..21013ccaeb 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -501,6 +501,190 @@ def get_idx(in_files, stop_idx=None, start_idx=None): return stopidx, startidx +def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): + # Initialize transforms with antsAI + init_aff = pe.Node( + AI( + metric=("Mattes", 32, "Regular", 0.2), + transform=("Affine", 0.1), + search_factor=(20, 0.12), + principal_axes=False, + convergence=(10, 1e-6, 10), + verbose=True, + ), + name=f"init_aff_{pipe_num}", + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], + ) + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, init_aff, "fixed_image") + + node, out = strat_pool.get_data("FSL-AFNI-brain-mask") + wf.connect(node, out, init_aff, "fixed_image_mask") + + init_aff.inputs.search_grid = (40, (0, 40, 40)) + + # Set up spatial normalization + norm = pe.Node( + ants.Registration( + winsorize_upper_quantile=0.98, + winsorize_lower_quantile=0.05, + float=True, + metric=["Mattes"], + metric_weight=[1], + radius_or_number_of_bins=[64], + transforms=["Affine"], + transform_parameters=[[0.1]], + number_of_iterations=[[200]], + convergence_window_size=[10], + convergence_threshold=[1.0e-9], + sampling_strategy=["Random", "Random"], + smoothing_sigmas=[[2]], + sigma_units=["mm", "mm", "mm"], + shrink_factors=[[2]], + sampling_percentage=[0.2], + use_histogram_matching=[True], + ), + name=f"norm_{pipe_num}", + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], + ) + + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, norm, "fixed_image") + + map_brainmask = pe.Node( + ants.ApplyTransforms( + interpolation="BSpline", + float=True, + ), + name=f"map_brainmask_{pipe_num}", + ) + + # Use the higher resolution and probseg for numerical 
stability in rounding + node, out = strat_pool.get_data("FSL-AFNI-brain-probseg") + wf.connect(node, out, map_brainmask, "input_image") + + binarize_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}" + ) + binarize_mask.inputs.args = "-thr 0.85 -bin" + + # Dilate pre_mask + pre_dilate = pe.Node( + fsl.DilateImage( + operation="max", + kernel_shape="sphere", + kernel_size=3.0, + internal_datatype="char", + ), + name=f"pre_mask_dilate_{pipe_num}", + ) + + # Fix precision errors + # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors + print_header = pe.Node( + PrintHeader(what_information=4), name=f"print_header_{pipe_num}" + ) + set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}") + + # Run N4 normally, force num_threads=1 for stability (images are + # small, no need for >1) + n4_correct = pe.Node( + ants.N4BiasFieldCorrection( + dimension=3, copy_header=True, bspline_fitting_distance=200 + ), + shrink_factor=2, + rescale_intensities=True, + name=f"n4_correct_{pipe_num}", + n_procs=1, + ) + + # Create a generous BET mask out of the bias-corrected EPI + skullstrip_first_pass = pe.Node( + fsl.BET(frac=0.2, mask=True, functional=False), + name=f"skullstrip_first_pass_{pipe_num}", + ) + + bet_dilate = pe.Node( + fsl.DilateImage( + operation="max", + kernel_shape="sphere", + kernel_size=6.0, + internal_datatype="char", + ), + name=f"skullstrip_first_dilate_{pipe_num}", + ) + + bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}") + + # Use AFNI's unifize for T2 contrast + unifize = pe.Node( + afni_utils.Unifize( + t2=True, + outputtype="NIFTI_GZ", + args="-clfrac 0.2 -rbt 18.3 65.0 90.0", + out_file="uni.nii.gz", + ), + name=f"unifize_{pipe_num}", + ) + + # Run AFNI's 3dAutomask to extract a refined brain mask + skullstrip_second_pass = pe.Node( + preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"), + name=f"skullstrip_second_pass_{pipe_num}", + ) + + # Take intersection of both masks + combine_masks = pe.Node( + fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}" + ) + + # Compute masked brain + apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}") + + node, out = strat_pool.get_data(["motion-basefile"]) + + wf.connect( + [ + (node, init_aff, [(out, "moving_image")]), + (node, map_brainmask, [(out, "reference_image")]), + (node, norm, [(out, "moving_image")]), + (init_aff, norm, [("output_transform", "initial_moving_transform")]), + ( + norm, + map_brainmask, + [ + ("reverse_invert_flags", "invert_transform_flags"), + ("reverse_transforms", "transforms"), + ], + ), + (map_brainmask, binarize_mask, [("output_image", "in_file")]), + (binarize_mask, pre_dilate, [("out_file", "in_file")]), + (pre_dilate, print_header, [("out_file", "image")]), + (print_header, set_direction, [("header", "direction")]), + (node, set_direction, [(out, "infile"), (out, "outfile")]), + (set_direction, n4_correct, [("outfile", "mask_image")]), + (node, n4_correct, [(out, "input_image")]), + (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]), + (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]), + (bet_dilate, bet_mask, [("out_file", "mask_file")]), + (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]), + (bet_mask, unifize, [("out_file", "in_file")]), + (unifize, skullstrip_second_pass, [("out_file", "in_file")]), + (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
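+            # The remaining connections intersect the first-pass BET mask with the 3dAutomask result (combine_masks) and apply that combined mask to the unifized EPI (apply_mask):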
(skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]), + (unifize, apply_mask, [("out_file", "in_file")]), + (combine_masks, apply_mask, [("out_file", "mask_file")]), + ] + ) + + outputs = { + "fMRIprep_brain_mask": (combine_masks, "out_file"), + "desc-unifized_bold": (apply_mask, "out_file"), + } + + return (wf, outputs) + + @nodeblock( name="func_reorient", config=["functional_preproc", "update_header"], @@ -953,7 +1137,7 @@ def form_thr_string(thr): "space-bold_desc-brain_mask": { "Description": "mask of the skull-stripped input file" }, - "desc-ref_bold": { + "desc-unifized_bold": { "Description": "the ``bias_corrected_file`` after skull-stripping" }, }, @@ -1048,185 +1232,8 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # Modifications copyright (C) 2021 - 2024 C-PAC Developers - # Initialize transforms with antsAI - init_aff = pe.Node( - AI( - metric=("Mattes", 32, "Regular", 0.2), - transform=("Affine", 0.1), - search_factor=(20, 0.12), - principal_axes=False, - convergence=(10, 1e-6, 10), - verbose=True, - ), - name=f"init_aff_{pipe_num}", - n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], - ) - node, out = strat_pool.get_data("FSL-AFNI-bold-ref") - wf.connect(node, out, init_aff, "fixed_image") - - node, out = strat_pool.get_data("FSL-AFNI-brain-mask") - wf.connect(node, out, init_aff, "fixed_image_mask") - - init_aff.inputs.search_grid = (40, (0, 40, 40)) - - # Set up spatial normalization - norm = pe.Node( - ants.Registration( - winsorize_upper_quantile=0.98, - winsorize_lower_quantile=0.05, - float=True, - metric=["Mattes"], - metric_weight=[1], - radius_or_number_of_bins=[64], - transforms=["Affine"], - transform_parameters=[[0.1]], - number_of_iterations=[[200]], - convergence_window_size=[10], - convergence_threshold=[1.0e-9], - sampling_strategy=["Random", "Random"], - smoothing_sigmas=[[2]], - sigma_units=["mm", "mm", "mm"], - shrink_factors=[[2]], - sampling_percentage=[0.2], - use_histogram_matching=[True], - ), - name=f"norm_{pipe_num}", - n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], - ) - - node, out = strat_pool.get_data("FSL-AFNI-bold-ref") - wf.connect(node, out, norm, "fixed_image") - - map_brainmask = pe.Node( - ants.ApplyTransforms( - interpolation="BSpline", - float=True, - ), - name=f"map_brainmask_{pipe_num}", - ) - - # Use the higher resolution and probseg for numerical stability in rounding - node, out = strat_pool.get_data("FSL-AFNI-brain-probseg") - wf.connect(node, out, map_brainmask, "input_image") - - binarize_mask = pe.Node( - interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}" - ) - binarize_mask.inputs.args = "-thr 0.85 -bin" - - # Dilate pre_mask - pre_dilate = pe.Node( - fsl.DilateImage( - operation="max", - kernel_shape="sphere", - kernel_size=3.0, - internal_datatype="char", - ), - name=f"pre_mask_dilate_{pipe_num}", - ) - - # Fix precision errors - # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors - print_header = pe.Node( - PrintHeader(what_information=4), name=f"print_header_{pipe_num}" - ) - set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}") - - # Run N4 normally, force num_threads=1 for stability (images are - # small, no need for >1) - n4_correct = pe.Node( - ants.N4BiasFieldCorrection( - dimension=3, copy_header=True, bspline_fitting_distance=200 - ), - shrink_factor=2, - rescale_intensities=True, - name=f"n4_correct_{pipe_num}", - n_procs=1, - ) - - # Create a 
generous BET mask out of the bias-corrected EPI - skullstrip_first_pass = pe.Node( - fsl.BET(frac=0.2, mask=True, functional=False), - name=f"skullstrip_first_pass_{pipe_num}", - ) - - bet_dilate = pe.Node( - fsl.DilateImage( - operation="max", - kernel_shape="sphere", - kernel_size=6.0, - internal_datatype="char", - ), - name=f"skullstrip_first_dilate_{pipe_num}", - ) - - bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}") - - # Use AFNI's unifize for T2 constrast - unifize = pe.Node( - afni_utils.Unifize( - t2=True, - outputtype="NIFTI_GZ", - args="-clfrac 0.2 -rbt 18.3 65.0 90.0", - out_file="uni.nii.gz", - ), - name=f"unifize_{pipe_num}", - ) - - # Run ANFI's 3dAutomask to extract a refined brain mask - skullstrip_second_pass = pe.Node( - preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"), - name=f"skullstrip_second_pass_{pipe_num}", - ) - - # Take intersection of both masks - combine_masks = pe.Node( - fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}" - ) - - # Compute masked brain - apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}") - - node, out = strat_pool.get_data(["motion-basefile"]) - - wf.connect( - [ - (node, init_aff, [(out, "moving_image")]), - (node, map_brainmask, [(out, "reference_image")]), - (node, norm, [(out, "moving_image")]), - (init_aff, norm, [("output_transform", "initial_moving_transform")]), - ( - norm, - map_brainmask, - [ - ("reverse_invert_flags", "invert_transform_flags"), - ("reverse_transforms", "transforms"), - ], - ), - (map_brainmask, binarize_mask, [("output_image", "in_file")]), - (binarize_mask, pre_dilate, [("out_file", "in_file")]), - (pre_dilate, print_header, [("out_file", "image")]), - (print_header, set_direction, [("header", "direction")]), - (node, set_direction, [(out, "infile"), (out, "outfile")]), - (set_direction, n4_correct, [("outfile", "mask_image")]), - (node, n4_correct, [(out, "input_image")]), - (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]), - (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]), - (bet_dilate, bet_mask, [("out_file", "mask_file")]), - (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]), - (bet_mask, unifize, [("out_file", "in_file")]), - (unifize, skullstrip_second_pass, [("out_file", "in_file")]), - (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]), - (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]), - (unifize, apply_mask, [("out_file", "in_file")]), - (combine_masks, apply_mask, [("out_file", "mask_file")]), - ] - ) - - outputs = { - "space-bold_desc-brain_mask": (combine_masks, "out_file"), - "desc-ref_bold": (apply_mask, "out_file"), - } + wf, outputs = fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt) + outputs["space-bold_desc-brain_mask"] = outputs["fMRIprep_brain_mask"] return (wf, outputs) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1c6b6fa71a..11d5ed0f78 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,6 +24,7 @@ from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc +from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock @@ -3223,14 +3224,20 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): "input", ], option_val="fmriprep_reference", - inputs=["desc-ref_bold"], - outputs=["sbref"], + inputs=[ + ("motion-basefile", "desc-preproc_bold"), + "FSL-AFNI-bold-ref", + "FSL-AFNI-brain-mask", + "FSL-AFNI-brain-probseg", + ], + outputs=["sbref", "desc-unifized_bold", "fMRIprep_brain_mask"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): """Generate fMRIPrep-style single-band reference for coregistration.""" - coreg_input = strat_pool.get_data("desc-ref_bold") + if not strat_pool.check_rpool("desc-unifized_bold"): + wf, outputs = fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num) + else: + outputs = {"desc-unifized_bold": strat_pool.get_data("desc-unifized_bold")} - outputs = {"sbref": coreg_input} + outputs["sbref"] = outputs["desc-unifized_bold"] return (wf, outputs) From b65de8cabfa1a1eb705f414b179db7e5e3e4a04b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 3 Feb 2025 12:46:41 -0500 Subject: [PATCH 167/507] :heavy_minus_sign: removed space-template_desc-T1w_mask resource from outputs and replaced with space-template_desc-brain_mask for nodeblocks requesting it --- CHANGELOG.md | 5 +++++ CPAC/func_preproc/func_preproc.py | 4 ++-- CPAC/registration/registration.py | 11 +++-------- CPAC/resources/cpac_outputs.tsv | 1 - CPAC/surface/surf_preproc.py | 4 ++-- 5 files changed, 12 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..85b0b65232 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `ABCD-HCP` - `fMRIPrep-LTS` - Typehinting support for Python < 3.10. +- Resource `space-template_desc-T1w_mask` + - as output from FNIRT registration. + - as inputs from nodeblocks requesting it, and replaced with `space-template_desc-brain_mask`. + - from outputs tsv. 
+ ## [1.8.7] - 2024-05-03 diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 3bac53cc87..e577ac3ffc 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1478,7 +1478,7 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): "desc-preproc_bold", "T1w-template-funcreg", "space-template_desc-preproc_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", ], outputs=[ "space-template_res-bold_desc-brain_T1w", @@ -1519,7 +1519,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): "anatomical_registration" ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data("space-template_desc-T1w_mask") + node, out = strat_pool.get_data("space-template_desc-brain_mask") wf.connect(node, out, anat_brain_mask_to_func_res, "in_file") wf.connect( diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1c6b6fa71a..57a3cf6dfd 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2271,7 +2271,7 @@ def bold_to_T1template_xfm_connector( outputs={ "space-template_desc-preproc_T1w": {"Template": "T1w-brain-template"}, "space-template_desc-head_T1w": {"Template": "T1w-template"}, - "space-template_desc-T1w_mask": {"Template": "T1w-template"}, + "space-template_desc-brain_mask": {"Template": "T1w-template"}, "space-template_desc-T1wT2w_biasfield": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, "from-template_to-T1w_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, @@ -2896,7 +2896,6 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={ "space-template_desc-preproc_T1w": {"Template": "T1w-template"}, "space-template_desc-head_T1w": {"Template": "T1w-template"}, - "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_xfm": {"Template": "T1w-template"}, "from-template_to-T1w_mode-image_xfm": {"Template": "T1w-template"}, }, @@ -3116,10 +3115,6 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None outputs = { "space-template_desc-preproc_T1w": (apply_mask, "out_file"), "space-template_desc-head_T1w": (fsl_apply_warp_t1_to_template, "out_file"), - "space-template_desc-T1w_mask": ( - fsl_apply_warp_t1_brain_mask_to_template, - "out_file", - ), "from-T1w_to-template_mode-image_xfm": (merge_xfms, "merged_file"), "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"), } @@ -4419,7 +4414,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "from-bold_to-T1w_mode-image_desc-linear_warp", "T1w-template", "space-template_desc-head_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", "space-template_desc-T1wT2w_biasfield", ) ], @@ -4480,7 +4475,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No "anatomical_registration" ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data("space-template_desc-T1w_mask") + node, out = strat_pool.get_data("space-template_desc-brain_mask") wf.connect(node, out, applywarp_anat_mask_res, "in_file") wf.connect(applywarp_anat_res, "out_file", applywarp_anat_mask_res, "ref_file") diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index 873defbbff..8fe4cd284f 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ 
b/CPAC/resources/cpac_outputs.tsv @@ -166,7 +166,6 @@ desc-restore-brain_T1w T1w T1w anat NIfTI space-template_desc-brain_T1w T1w template anat NIfTI Yes space-template_desc-preproc_T1w T1w template anat NIfTI space-template_desc-head_T1w T1w template anat NIfTI -space-template_desc-T1w_mask mask template anat NIfTI space-template_desc-Mean_timeseries timeseries func 1D desc-MeanSCA_timeseries timeseries func 1D desc-SpatReg_timeseries timeseries func 1D diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 1defe4e2d1..017ce4d604 100644 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -928,7 +928,7 @@ def run_surface( [ "space-template_desc-head_T1w", "space-template_desc-brain_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", ], [ "from-T1w_to-template_mode-image_xfm", @@ -1202,7 +1202,7 @@ def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): space_temp = [ "space-template_desc-head_T1w", "space-template_desc-brain_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", ] atlas_xfm = [ "from-T1w_to-template_mode-image_xfm", From f14f0a3601f12295c623fe1a4dac94ffe34e2166 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 3 Feb 2025 18:08:24 -0500 Subject: [PATCH 168/507] :heavy_minus_sign: removed extra unused outputs from the list --- CHANGELOG.md | 1 + CPAC/anat_preproc/anat_preproc.py | 18 ------------------ 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..7dd8592bb6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `ABCD-HCP` - `fMRIPrep-LTS` - Typehinting support for Python < 3.10. +- Extra outputs listed in the `freesurfer_abcd_preproc`. 
## [1.8.7] - 2024-05-03 diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index a561f8e077..eda6ecf62a 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2764,24 +2764,6 @@ def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None): "desc-restore-brain_T1w", "desc-ABCDpreproc_T1w", "pipeline-fs_desc-fast_biasfield", - "pipeline-fs_hemi-L_desc-surface_curv", - "pipeline-fs_hemi-R_desc-surface_curv", - "pipeline-fs_hemi-L_desc-surfaceMesh_pial", - "pipeline-fs_hemi-R_desc-surfaceMesh_pial", - "pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm", - "pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm", - "pipeline-fs_hemi-L_desc-surfaceMesh_sphere", - "pipeline-fs_hemi-R_desc-surfaceMesh_sphere", - "pipeline-fs_hemi-L_desc-surfaceMap_sulc", - "pipeline-fs_hemi-R_desc-surfaceMap_sulc", - "pipeline-fs_hemi-L_desc-surfaceMap_thickness", - "pipeline-fs_hemi-R_desc-surfaceMap_thickness", - "pipeline-fs_hemi-L_desc-surfaceMap_volume", - "pipeline-fs_hemi-R_desc-surfaceMap_volume", - "pipeline-fs_hemi-L_desc-surfaceMesh_white", - "pipeline-fs_hemi-R_desc-surfaceMesh_white", - "pipeline-fs_wmparc", - "freesurfer-subject-dir", ], ) def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): From 97b5c4b1622ff165e035dd9f40e5e52d2b525c13 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 6 Feb 2025 11:43:58 -0500 Subject: [PATCH 169/507] :construction_worker: :wrench: Add GH Actions to Dependabot config --- .github/dependabot.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .github/dependabot.yaml diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 0000000000..6055c3ada4 --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + # Check for updates once a week + schedule: + interval: "weekly" + groups: + all-actions: + patterns: [ "*" ] From 01f760bca0e77c2d04a4e508db9cab547a3d24e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:49:01 +0000 Subject: [PATCH 170/507] Bump the all-actions group with 10 updates Bumps the all-actions group with 10 updates: | Package | From | To | | --- | --- | --- | | [easimon/maximize-build-space](https://github.com/easimon/maximize-build-space) | `6` | `10` | | [actions/checkout](https://github.com/actions/checkout) | `2` | `4` | | [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) | `2.2.1` | `3.9.0` | | [docker/login-action](https://github.com/docker/login-action) | `2` | `3` | | [docker/build-push-action](https://github.com/docker/build-push-action) | `4.0.0` | `6.13.0` | | [Chizkiyahu/delete-untagged-ghcr-action](https://github.com/chizkiyahu/delete-untagged-ghcr-action) | `2` | `6` | | [tj-actions/changed-files](https://github.com/tj-actions/changed-files) | `41.0.0` | `45.0.7` | | [actions/upload-artifact](https://github.com/actions/upload-artifact) | `3` | `4` | | [guibranco/github-status-action-v2](https://github.com/guibranco/github-status-action-v2) | `1.1.7` | `1.1.13` | | [appleboy/ssh-action](https://github.com/appleboy/ssh-action) | `1.0.0` | `1.2.0` | Updates `easimon/maximize-build-space` from 6 to 10 - [Release notes](https://github.com/easimon/maximize-build-space/releases) - [Changelog](https://github.com/easimon/maximize-build-space/blob/master/CHANGELOG.md) - 
[Commits](https://github.com/easimon/maximize-build-space/compare/v6...v10) Updates `actions/checkout` from 2 to 4 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2...v4) Updates `docker/setup-buildx-action` from 2.2.1 to 3.9.0 - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v2.2.1...v3.9.0) Updates `docker/login-action` from 2 to 3 - [Release notes](https://github.com/docker/login-action/releases) - [Commits](https://github.com/docker/login-action/compare/v2...v3) Updates `docker/build-push-action` from 4.0.0 to 6.13.0 - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v4.0.0...v6.13.0) Updates `Chizkiyahu/delete-untagged-ghcr-action` from 2 to 6 - [Release notes](https://github.com/chizkiyahu/delete-untagged-ghcr-action/releases) - [Commits](https://github.com/chizkiyahu/delete-untagged-ghcr-action/compare/v2...v6) Updates `tj-actions/changed-files` from 41.0.0 to 45.0.7 - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/v41.0.0...v45.0.7) Updates `actions/upload-artifact` from 3 to 4 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) Updates `guibranco/github-status-action-v2` from 1.1.7 to 1.1.13 - [Release notes](https://github.com/guibranco/github-status-action-v2/releases) - [Commits](https://github.com/guibranco/github-status-action-v2/compare/v1.1.7...v1.1.13) Updates `appleboy/ssh-action` from 1.0.0 to 1.2.0 - [Release notes](https://github.com/appleboy/ssh-action/releases) - [Changelog](https://github.com/appleboy/ssh-action/blob/master/.goreleaser.yaml) - [Commits](https://github.com/appleboy/ssh-action/compare/v1.0.0...v1.2.0) --- updated-dependencies: - dependency-name: easimon/maximize-build-space dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/setup-buildx-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/login-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: Chizkiyahu/delete-untagged-ghcr-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: guibranco/github-status-action-v2 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: all-actions - 
dependency-name: appleboy/ssh-action dependency-type: direct:production update-type: version-update:semver-minor dependency-group: all-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/build_C-PAC.yml | 10 +++--- .github/workflows/build_and_test.yml | 38 ++++++++++---------- .github/workflows/delete_images.yml | 4 +-- .github/workflows/deploy_to_Docker_Hub.yml | 2 +- .github/workflows/on_push.yml | 4 +-- .github/workflows/regression_test_full.yml | 4 +-- .github/workflows/regression_test_lite.yml | 4 +-- .github/workflows/smoke_test_participant.yml | 6 ++-- 8 files changed, 36 insertions(+), 36 deletions(-) diff --git a/.github/workflows/build_C-PAC.yml b/.github/workflows/build_C-PAC.yml index d126f6a778..fb103acdd7 100644 --- a/.github/workflows/build_C-PAC.yml +++ b/.github/workflows/build_C-PAC.yml @@ -13,20 +13,20 @@ jobs: runs-on: ubuntu-latest steps: - name: Maximize build space - uses: easimon/maximize-build-space@v6 + uses: easimon/maximize-build-space@v10 with: remove-dotnet: 'true' remove-android: 'true' remove-haskell: 'true' overprovision-lvm: 'true' - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2.2.1 + uses: docker/setup-buildx-action@v3.9.0 - name: Log in to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -87,7 +87,7 @@ jobs: echo $DOCKERFILE cat $DOCKERFILE - name: Build and push Docker image - uses: docker/build-push-action@v4.0.0 + uses: docker/build-push-action@v6.13.0 with: context: . file: ${{ env.DOCKERFILE }} diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 6dadd8f9f9..ddfabc4001 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -58,7 +58,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set tag & see if it exists @@ -80,17 +80,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" - name: Set up Docker Buildx if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/setup-buildx-action@v2.2.1 + uses: docker/setup-buildx-action@v3.9.0 - name: Log in to GitHub Container Registry if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push Docker image if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/build-push-action@v4.0.0 + uses: docker/build-push-action@v6.13.0 with: file: .github/Dockerfiles/${{ matrix.Dockerfile }}.Dockerfile push: true @@ -110,7 +110,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set tag & see if it exists @@ -140,17 +140,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" - name: Set up Docker Buildx if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/setup-buildx-action@v2.2.1 + uses: docker/setup-buildx-action@v3.9.0 - name: Log in to GitHub Container Registry if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || 
steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push Docker image if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/build-push-action@v4.0.0 + uses: docker/build-push-action@v6.13.0 with: context: . file: .github/Dockerfiles/${{ matrix.Dockerfile }}.Dockerfile @@ -172,14 +172,14 @@ jobs: variant: ${{ fromJSON(inputs.phase_three) }} steps: - name: Maximize build space - uses: easimon/maximize-build-space@v6 + uses: easimon/maximize-build-space@v10 with: remove-dotnet: 'true' remove-android: 'true' remove-haskell: 'true' overprovision-lvm: 'true' - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Prep source files @@ -215,17 +215,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" - name: Set up Docker Buildx if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/setup-buildx-action@v2.2.1 + uses: docker/setup-buildx-action@v3.9.0 - name: Log in to GitHub Container Registry if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push base image if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/build-push-action@v4.0.0 + uses: docker/build-push-action@v6.13.0 with: context: . file: .github/Dockerfiles/base-${{ matrix.variant }}.Dockerfile @@ -244,14 +244,14 @@ jobs: REBUILD: ${{ inputs.rebuild_phase_three }} steps: - name: Maximize build space - uses: easimon/maximize-build-space@v6 + uses: easimon/maximize-build-space@v10 with: remove-dotnet: 'true' remove-android: 'true' remove-haskell: 'true' overprovision-lvm: 'true' - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Prep source files @@ -287,17 +287,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" - name: Set up Docker Buildx if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/setup-buildx-action@v2.2.1 + uses: docker/setup-buildx-action@v3.9.0 - name: Log in to GitHub Container Registry if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push base image if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1 - uses: docker/build-push-action@v4.0.0 + uses: docker/build-push-action@v6.13.0 with: context: . 
file: .github/Dockerfiles/base-standard.Dockerfile @@ -350,7 +350,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Trigger CircleCI tests diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml index 91ed5e98df..ce259fd811 100644 --- a/.github/workflows/delete_images.yml +++ b/.github/workflows/delete_images.yml @@ -18,7 +18,7 @@ jobs: IMAGE: c-pac steps: - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 'Delete branch image' run: | OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d '/' -f 1) @@ -41,7 +41,7 @@ jobs: -X DELETE \ https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID} - name: Delete all containers from repository without tags - uses: Chizkiyahu/delete-untagged-ghcr-action@v2 + uses: Chizkiyahu/delete-untagged-ghcr-action@v6 with: token: ${GITHUB_TOKEN} repository_owner: ${{ github.repository_owner }} diff --git a/.github/workflows/deploy_to_Docker_Hub.yml b/.github/workflows/deploy_to_Docker_Hub.yml index a9aaec8fab..a8ee3e6547 100644 --- a/.github/workflows/deploy_to_Docker_Hub.yml +++ b/.github/workflows/deploy_to_Docker_Hub.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Log in to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USER }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml index 60f6354dc5..c584dd14f5 100644 --- a/.github/workflows/on_push.yml +++ b/.github/workflows/on_push.yml @@ -32,11 +32,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out C-PAC - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Get changed files since last commit - uses: tj-actions/changed-files@v41.0.0 + uses: tj-actions/changed-files@v45.0.7 id: changed-files with: since_last_remote_commit: "true" diff --git a/.github/workflows/regression_test_full.yml b/.github/workflows/regression_test_full.yml index 6dba2d1bf2..2c95bf209b 100644 --- a/.github/workflows/regression_test_full.yml +++ b/.github/workflows/regression_test_full.yml @@ -23,7 +23,7 @@ jobs: fi - name: Checkout Code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Clone reg-suite run: | git clone https://github.com/amygutierrez/reg-suite.git @@ -34,7 +34,7 @@ jobs: echo "Running full regression test" echo "୧(๑•̀ヮ•́)૭ LET'S GO! 
٩(^ᗜ^ )و " - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: logs diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml index 4e6b5a46f6..a68baa0cda 100644 --- a/.github/workflows/regression_test_lite.yml +++ b/.github/workflows/regression_test_lite.yml @@ -57,7 +57,7 @@ jobs: ssh-keyscan -H -t rsa "${{ env.SSH_HOST }}" > ~/.ssh/known_hosts - name: Initiate check - uses: guibranco/github-status-action-v2@v1.1.7 + uses: guibranco/github-status-action-v2@v1.1.13 with: authToken: ${{ secrets.GITHUB_TOKEN }} context: Launch lite regression test @@ -65,7 +65,7 @@ jobs: state: pending - name: Connect and Run Regression Test Lite - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.0 with: host: ${{ secrets.SSH_HOST }} username: ${{ secrets.SSH_USER }} diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 3fde0de8aa..344d1ff420 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -104,7 +104,7 @@ jobs: --participant_label ${{ matrix.participant }} \ --preconfig ${{ matrix.preconfig }} \ --n_cpus 2 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ always() }} with: name: expectedOutputs human ${{ matrix.preconfig }} ${{ matrix.variant }} ${{ matrix.participant }} @@ -168,7 +168,7 @@ jobs: --preconfig ${{ matrix.preconfig }} \ --participant_label ${{ matrix.participant }} \ --n_cpus 2 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ always() }} with: name: expectedOutputs nhp ${{ matrix.preconfig }} ${{ matrix.variant }} ${{ matrix.participant }} @@ -226,7 +226,7 @@ jobs: /test-data /outputs test_config \ --preconfig rodent \ --n_cpus 2 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ always() }} with: name: expectedOutputs rodent ${{ matrix.variant }} From 5b0779da1f415d25505bc299560dfec8719eed36 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 6 Feb 2025 12:25:28 -0500 Subject: [PATCH 171/507] :construction_worker: :arrow_down: Keep Python < 3.12 for OSF datalad tests --- .github/workflows/smoke_test_participant.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 344d1ff420..1cba166a8b 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -124,6 +124,9 @@ jobs: participant: - 032102 032106 032164 032167 032130 032128 2215 2312 032191 032195 steps: + - uses: actions/setup-python@v5 + with: + python-version: '>=3.7 <3.12' - name: Get C-PAC run: | if [[ "${{ matrix.variant }}" != "" ]] @@ -183,6 +186,9 @@ jobs: variant: - '' steps: + - uses: actions/setup-python@v5 + with: + python-version: '>=3.7 <3.12' - name: Get C-PAC run: | if [[ "${{ matrix.variant }}" != "" ]] From 3955717af2a38ea8a21008c0c09256922599e874 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 6 Feb 2025 12:58:44 -0500 Subject: [PATCH 172/507] :construction_worker: :alien: Update git-annex installation --- .github/workflows/smoke_test_participant.yml | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 1cba166a8b..2966d4dccb 100644 --- a/.github/workflows/smoke_test_participant.yml +++ 
b/.github/workflows/smoke_test_participant.yml @@ -124,9 +124,6 @@ jobs: participant: - 032102 032106 032164 032167 032130 032128 2215 2312 032191 032195 steps: - - uses: actions/setup-python@v5 - with: - python-version: '>=3.7 <3.12' - name: Get C-PAC run: | if [[ "${{ matrix.variant }}" != "" ]] @@ -154,7 +151,7 @@ jobs: wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9 sudo apt-get update - sudo apt-get install datalad git-annex-standalone + sudo apt-get install datalad git-annex pip install datalad-osf - name: Get NHP test data run: | @@ -186,9 +183,6 @@ jobs: variant: - '' steps: - - uses: actions/setup-python@v5 - with: - python-version: '>=3.7 <3.12' - name: Get C-PAC run: | if [[ "${{ matrix.variant }}" != "" ]] @@ -216,7 +210,7 @@ jobs: wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9 sudo apt-get update - sudo apt-get install datalad git-annex-standalone + sudo apt-get install datalad git-annex pip install datalad-osf - name: Get rodent test data run: | From 7646484553204ed829e6a48c7af3f57829d57b6f Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 6 Feb 2025 14:11:45 -0500 Subject: [PATCH 173/507] :construction_worker: Install datalad via conda instead of neurodebian --- .github/workflows/smoke_test_participant.yml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 2966d4dccb..347a489d07 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -144,14 +144,13 @@ jobs: TAG=$TAG$VARIANT echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV cat $GITHUB_ENV + - name: setup-conda + uses: s-weigand/setup-conda@v1.2.3 - name: Set up datalad-OSF run: | git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (Machine User)" - wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list - sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9 - sudo apt-get update - sudo apt-get install datalad git-annex + yes | conda install -c conda-forge datalad pip install datalad-osf - name: Get NHP test data run: | @@ -203,14 +202,13 @@ jobs: TAG=$TAG$VARIANT echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV cat $GITHUB_ENV + - name: setup-conda + uses: s-weigand/setup-conda@v1.2.3 - name: Set up datalad-OSF run: | git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (Machine User)" - wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list - sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9 - sudo apt-get update - sudo apt-get install datalad git-annex + yes | conda install -c conda-forge datalad pip install datalad-osf - name: Get rodent test data run: | From 6e3f97b0efdf356da3b5acb361556dba93d26d02 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 16 Jan 2024 12:21:27 -0500 Subject: [PATCH 174/507] :wrench: Configure 
dependabot --- .github/dependabot.yaml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml index 6055c3ada4..d251de1d46 100644 --- a/.github/dependabot.yaml +++ b/.github/dependabot.yaml @@ -1,10 +1,20 @@ version: 2 updates: - package-ecosystem: "github-actions" - directory: "/" + directory: / # Check for updates once a week schedule: - interval: "weekly" + interval: weekly groups: all-actions: patterns: [ "*" ] +- package-ecosystem: pip + directory: / + schedule: + interval: weekly + groups: + production dependencies: + dependency-type: production + development dependencies: + dependency-type: development + target-branch: develop From f133aecffb97e75560e74cc85fd50562c2ec02bf Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 7 Feb 2025 12:59:01 -0600 Subject: [PATCH 175/507] Update CHANGELOG.md Co-authored-by: Jon Cluce --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7297e4b4d1..ac8787bdc1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -52,7 +52,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - from outputs tsv. - ## [1.8.7] - 2024-05-03 ### Added From 06d4f28ccdaabd9f747d889f3707f6caedc86a5d Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 7 Feb 2025 12:59:11 -0600 Subject: [PATCH 176/507] Update CHANGELOG.md Co-authored-by: Jon Cluce --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ac8787bdc1..fa2ece42fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,7 +45,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `ABCD-HCP` - `fMRIPrep-LTS` - Typehinting support for Python < 3.10. -- Extra outputs listed in the `freesurfer_abcd_preproc`. +- Extra outputs listed in `freesurfer_abcd_preproc`. - Resource `space-template_desc-T1w_mask` - as output from FNIRT registration. - as inputs from Nodeblocks requesting it and, replaced with `space-template_desc-brain_mask`. From fb254decf6f936d08b556e29b6924280e69c62b5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 10 Feb 2025 14:31:17 -0500 Subject: [PATCH 177/507] =?UTF-8?q?=F0=9F=93=9D=20updated=20the=20changelo?= =?UTF-8?q?g?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 4 ++++ CPAC/func_preproc/func_preproc.py | 3 +++ 2 files changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa2ece42fb..c44628a6dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). - Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`. +- [FSL-AFNI subworkflow](https://github.com/FCP-INDI/C-PAC/blob/4bdd6c410ef0a9b90f53100ea005af1f7d6e76c0/CPAC/func_preproc/func_preproc.py#L1052C4-L1231C25) + - Moved `FSL-AFNI subworkflow` from inside a `bold_mask_fsl_afni` nodeblock into a separate function. + - Renamed `desc-ref_bold` created in this workflow to `desc-unifized_bold`. 
+ - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it. ### Fixed diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index bbe2081492..e47b768846 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1188,6 +1188,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # * Removed ``if not pre_mask`` conditional block # * Modified docstring to reflect local changes # * Refactored some variables and connections and updated style to match C-PAC codebase + # * Moved fsl-afni subworkflow into a separate function and added a function call in this nodeblock. # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2016, the CRN developers team. @@ -1233,6 +1234,8 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # Modifications copyright (C) 2021 - 2024 C-PAC Developers wf, outputs = fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt) + + # both masks are available, but they are the same in this nodeblock outputs["space-bold_desc-brain_mask"] = outputs["fMRIprep_brain_mask"] return (wf, outputs) From 11948a64c94c05ef02338d3d7c4452ca3941eed2 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 11 Feb 2025 18:39:58 -0500 Subject: [PATCH 178/507] Checking if overwrite transform method is same as the anatomical registration method --- CPAC/error_handler/__init__.py | 0 CPAC/error_handler/exceptions.py | 12 +++++++ CPAC/pipeline/schema.py | 9 ++++++ CPAC/pipeline/test/test_schema_validation.py | 34 ++++++++++++++++++++ CPAC/registration/registration.py | 5 +++ 5 files changed, 60 insertions(+) create mode 100644 CPAC/error_handler/__init__.py create mode 100644 CPAC/error_handler/exceptions.py diff --git a/CPAC/error_handler/__init__.py b/CPAC/error_handler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py new file mode 100644 index 0000000000..1f51fcd575 --- /dev/null +++ b/CPAC/error_handler/exceptions.py @@ -0,0 +1,12 @@ + +class SchemaError(Exception): + """Exception raised for errors in the schema.""" + def __init__(self, message): + self.message = message + super().__init__(self.message) + +class NodeBlockError(Exception): + """Exception raised for errors in the node block.""" + def __init__(self, message): + self.message = message + super().__init__(self.message) \ No newline at end of file diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index fa36a0dd2e..e08261a16d 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -50,6 +50,7 @@ from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.utils import YAML_BOOLS +from CPAC.error_handler.exceptions import SchemaError # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$" @@ -1388,6 +1389,14 @@ def schema(config_dict): " Try turning one option off.\n " ) raise ExclusiveInvalid(msg) + + overwrite = partially_validated["registration_workflows"]["anatomical_registration"]["overwrite_transform"] + + if overwrite["run"] and overwrite["using"] in partially_validated["registration_workflows"]["anatomical_registration"]["registration"]["using"]: + raise ExclusiveInvalid( + "[!] 
Overwrite transform is found same as the anatomical registration method! " + "No need to overwrite transform with the same registration method." + ) except KeyError: pass try: diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 36a75a1a00..8f03bafc82 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -113,3 +113,37 @@ def test_pipeline_name(): """Test that pipeline_name successfully sanitizes.""" c = Configuration({"pipeline_setup": {"pipeline_name": ":va:lid name"}}) assert c["pipeline_setup", "pipeline_name"] == "valid_name" + + +@pytest.mark.parametrize( + "registration_using", + [ + list(combo) + for _ in [ + list(combinations(["ANTS", "FSL", "FSL-linear"], i)) for i in range(1, 4) + ] + for combo in _ + ], +) +def test_overwrite_transform(registration_using): + """Test that overwrite transform errors when it is already a registration method.""" + # pylint: disable=invalid-name + d = { + "registration_workflows": { + "anatomical_registration": { + "registration": { + "using": registration_using + }, + "overwrite_transform": { + "run": "On", + "using": "FSL" + } + } + } + } + if "FSL" not in registration_using: + Configuration(d) # validates without exception + else: + with pytest.raises(ExclusiveInvalid) as e: + Configuration(d) + assert "Overwrite transform is found same as the anatomical registration method" in str(e.value) \ No newline at end of file diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 4848637d4d..3adcad676a 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -42,6 +42,7 @@ from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool +from CPAC.error_handler.exceptions import NodeBlockError def apply_transform( @@ -3079,6 +3080,10 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"), } + + else: + outputs = {} + raise NodeBlockError("Invalid registration tool or option provided. 
Please make sure the registration tool is ANTs and the option is FSL.") + return (wf, outputs) From 7b3603af7862211f598701c837654e13f3e1a673 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 11 Feb 2025 18:45:37 -0500 Subject: [PATCH 179/507] with precommit changes --- CPAC/error_handler/exceptions.py | 6 ++++-- CPAC/pipeline/schema.py | 13 ++++++++++--- CPAC/pipeline/test/test_schema_validation.py | 13 +++++-------- CPAC/registration/registration.py | 6 ++++-- 4 files changed, 23 insertions(+), 15 deletions(-) diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py index 1f51fcd575..5c17a4028f 100644 --- a/CPAC/error_handler/exceptions.py +++ b/CPAC/error_handler/exceptions.py @@ -1,12 +1,14 @@ - class SchemaError(Exception): """Exception raised for errors in the schema.""" + def __init__(self, message): self.message = message super().__init__(self.message) + class NodeBlockError(Exception): """Exception raised for errors in the node block.""" + def __init__(self, message): self.message = message - super().__init__(self.message) \ No newline at end of file + super().__init__(self.message) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index e08261a16d..825b682fb9 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -50,7 +50,6 @@ from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.utils import YAML_BOOLS -from CPAC.error_handler.exceptions import SchemaError # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$" @@ -1390,9 +1389,17 @@ def schema(config_dict): ) raise ExclusiveInvalid(msg) - overwrite = partially_validated["registration_workflows"]["anatomical_registration"]["overwrite_transform"] + overwrite = partially_validated["registration_workflows"][ + "anatomical_registration" + ]["overwrite_transform"] - if overwrite["run"] and overwrite["using"] in partially_validated["registration_workflows"]["anatomical_registration"]["registration"]["using"]: + if ( + overwrite["run"] + and overwrite["using"] + in partially_validated["registration_workflows"]["anatomical_registration"][ + "registration" + ]["using"] + ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! " "No need to overwrite transform with the same registration method." 
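The guard above compares two already-parsed config sections, so it has to run after partial validation rather than inside a single field's schema. A minimal standalone sketch of the same cross-field check, assuming a plain nested-dict config and voluptuous's `ExclusiveInvalid` (both consistent with the diff above):

    from voluptuous.error import ExclusiveInvalid

    def check_overwrite_transform(partially_validated: dict) -> None:
        # Overwriting a transform with the same method that produced it is a
        # no-op, so reject the configuration outright.
        anat = partially_validated["registration_workflows"]["anatomical_registration"]
        overwrite = anat["overwrite_transform"]
        if overwrite["run"] and overwrite["using"] in anat["registration"]["using"]:
            raise ExclusiveInvalid(
                "[!] Overwrite transform is found same as the anatomical "
                "registration method! No need to overwrite transform with "
                "the same registration method."
            )

The diff below reformats the parametrized test that exercises this path.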
diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 8f03bafc82..2b680ddd5a 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -131,13 +131,8 @@ def test_overwrite_transform(registration_using): d = { "registration_workflows": { "anatomical_registration": { - "registration": { - "using": registration_using - }, - "overwrite_transform": { - "run": "On", - "using": "FSL" - } + "registration": {"using": registration_using}, + "overwrite_transform": {"run": "On", "using": "FSL"}, } } } if "FSL" not in registration_using: Configuration(d) # validates without exception else: with pytest.raises(ExclusiveInvalid) as e: Configuration(d) - assert "Overwrite transform is found same as the anatomical registration method" in str(e.value) \ No newline at end of file + assert "Overwrite transform is found same as the anatomical registration method" in str( + e.value + ) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 3adcad676a..f7f429aeda 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,6 +24,7 @@ from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc +from CPAC.error_handler.exceptions import NodeBlockError from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock @@ -42,7 +43,6 @@ from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool -from CPAC.error_handler.exceptions import NodeBlockError def apply_transform( @@ -3082,7 +3082,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None else: outputs = {} - raise NodeBlockError("Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL.") + raise NodeBlockError( + "Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL." + ) return (wf, outputs) From 9ed1772d7b281aba623a1158932cc32cd1f99f86 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Fri, 31 Jan 2025 16:05:19 -0500 Subject: [PATCH 180/507] Removed the erroneous connection that can sometimes lead to dropping TRs unnecessarily.
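With AFNI's 3dTProject, a file passed to `censor` marks TRs for removal, zeroing, or interpolation according to `cenmode`, while spike regression handles bad TRs by adding one spike column per offending TR to the regressor file. Sending the censor file alongside spike regressors therefore censors the same TRs twice, and under the default mode drops them from the timeseries outright. A sketch of the distinction, assuming nipype's `afni.TProject` interface and hypothetical file names:

    from nipype.interfaces import afni

    tproject = afni.TProject()
    tproject.inputs.in_file = "func_preproc.nii.gz"  # hypothetical preprocessed BOLD
    tproject.inputs.ort = "nuisance_regressors.1D"  # spike columns live here for SpikeRegression
    # Kill / Zero / Interpolate censoring is expressed via a censor file plus cenmode:
    # tproject.inputs.censor = "censor.1D"  # 1 = keep TR, 0 = censor TR
    # tproject.inputs.cenmode = "KILL"  # or "ZERO" / "NTRP"
    # For SpikeRegression, `censor` stays unset so the full-length timeseries is kept.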
--- CPAC/nuisance/nuisance.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index e353aae03b..90f9ed1230 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1757,17 +1757,14 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre nuisance_regression.inputs.norm = False if nuisance_selectors.get("Censor"): - if nuisance_selectors["Censor"]["method"] == "SpikeRegression": - nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") + if nuisance_selectors["Censor"]["method"] == "Interpolate": + nuisance_regression.inputs.cenmode = "NTRP" else: - if nuisance_selectors["Censor"]["method"] == "Interpolate": - nuisance_regression.inputs.cenmode = "NTRP" - else: - nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][ - "method" - ].upper() + nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][ + "method" + ].upper() - nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") + nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") if nuisance_selectors.get("PolyOrt"): if not nuisance_selectors["PolyOrt"].get("degree"): From a0da964f5b5dd7e070f53718d84c838e84369131 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Fri, 31 Jan 2025 16:26:20 -0500 Subject: [PATCH 181/507] Updated the CHANGELOG. --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa2ece42fb..796d789ef6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Restored `bids-validator` functionality. - Fixed empty `shell` variable in cluster run scripts. - A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. +- Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. 
### Removed From faf6338f1c37bafe6f3b4304998cd61fcf099c43 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 13 Feb 2025 12:54:57 -0500 Subject: [PATCH 182/507] =?UTF-8?q?=F0=9F=9A=9A=20Moving=20find=5Fcensors?= =?UTF-8?q?=20into=20its=20own=20function=20and=20subworkflow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/nuisance/nuisance.py | 245 +++++++++++++++++++++++++------------- 1 file changed, 159 insertions(+), 86 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 90f9ed1230..0aed17428a 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -496,6 +496,115 @@ def gather_nuisance( return output_file_path, censor_indices +def offending_timepoints_connector( + nuisance_selectors, name="offending_timepoints_connector" +): + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "fd_j_file_path", + "fd_p_file_path", + "dvars_file_path", + ] + ), + name="inputspec", + ) + + wf = pe.Workflow(name=name) + + outputspec = pe.Node( + util.IdentityInterface(fields=["out_file"]), + name="outputspec", + ) + + censor_methods = ["Kill", "Zero", "Interpolate", "SpikeRegression"] + + censor_selector = nuisance_selectors.get("Censor") + if censor_selector is None or censor_selector.get("method") not in censor_methods: + msg = ( + "Improper censoring method specified ({0}), " + "should be one of {1}.".format( + censor_selector.get("method", None) if censor_selector else None, + censor_methods, + ) + ) + raise ValueError(msg) + + find_censors = pe.Node( + Function( + input_names=[ + "fd_j_file_path", + "fd_j_threshold", + "fd_p_file_path", + "fd_p_threshold", + "dvars_file_path", + "dvars_threshold", + "number_of_previous_trs_to_censor", + "number_of_subsequent_trs_to_censor", + ], + output_names=["out_file"], + function=find_offending_time_points, + as_module=True, + ), + name="find_offending_time_points", + ) + + if not censor_selector.get("thresholds"): + msg = "Censoring requested, but thresh_metric not provided." + raise ValueError(msg) + + for threshold in censor_selector["thresholds"]: + if "type" not in threshold or threshold["type"] not in [ + "DVARS", + "FD_J", + "FD_P", + ]: + msg = "Censoring requested, but with invalid threshold type." + raise ValueError(msg) + + if "value" not in threshold: + msg = "Censoring requested, but threshold not provided." 
+ raise ValueError(msg) + + if threshold["type"] == "FD_J": + find_censors.inputs.fd_j_threshold = threshold["value"] + wf.connect(inputspec, "fd_j_file_path", find_censors, "fd_j_file_path") + + if threshold["type"] == "FD_P": + find_censors.inputs.fd_p_threshold = threshold["value"] + wf.connect(inputspec, "fd_p_file_path", find_censors, "fd_p_file_path") + + if threshold["type"] == "DVARS": + find_censors.inputs.dvars_threshold = threshold["value"] + wf.connect(inputspec, "dvars_file_path", find_censors, "dvars_file_path") + + if ( + censor_selector.get("number_of_previous_trs_to_censor") + and censor_selector["method"] != "SpikeRegression" + ): + find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[ + "number_of_previous_trs_to_censor" + ] + + else: + find_censors.inputs.number_of_previous_trs_to_censor = 0 + + if ( + censor_selector.get("number_of_subsequent_trs_to_censor") + and censor_selector["method"] != "SpikeRegression" + ): + find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[ + "number_of_subsequent_trs_to_censor" + ] + + else: + find_censors.inputs.number_of_subsequent_trs_to_censor = 0 + + wf.connect(find_censors, "out_file", outputspec, "out_file") + + return wf + + def create_regressor_workflow( nuisance_selectors, use_ants, @@ -1547,6 +1656,30 @@ def create_regressor_workflow( "functional_file_path", ) + if nuisance_selectors.get("Censor"): + offending_timepoints_connector_wf = offending_timepoints_connector( + nuisance_selectors + ) + nuisance_wf.connect( + [ + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_j_file_path", "inputspec.fd_j_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_p_file_path", "inputspec.fd_p_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("dvars_file_path", "inputspec.dvars_file_path")], + ), + ] + ) + build_nuisance_regressors.inputs.selector = nuisance_selectors # Check for any regressors to combine into files @@ -1656,93 +1789,28 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre nuisance_wf = pe.Workflow(name=name) if nuisance_selectors.get("Censor"): - censor_methods = ["Kill", "Zero", "Interpolate", "SpikeRegression"] - - censor_selector = nuisance_selectors.get("Censor") - if censor_selector.get("method") not in censor_methods: - msg = ( - "Improper censoring method specified ({0}), " - "should be one of {1}.".format( - censor_selector.get("method"), censor_methods - ) - ) - raise ValueError(msg) - - find_censors = pe.Node( - Function( - input_names=[ - "fd_j_file_path", - "fd_j_threshold", - "fd_p_file_path", - "fd_p_threshold", - "dvars_file_path", - "dvars_threshold", - "number_of_previous_trs_to_censor", - "number_of_subsequent_trs_to_censor", - ], - output_names=["out_file"], - function=find_offending_time_points, - as_module=True, - ), - name="find_offending_time_points", + offending_timepoints_connector_wf = offending_timepoints_connector( + nuisance_selectors ) - - if not censor_selector.get("thresholds"): - msg = "Censoring requested, but thresh_metric not provided." - raise ValueError(msg) - - for threshold in censor_selector["thresholds"]: - if "type" not in threshold or threshold["type"] not in [ - "DVARS", - "FD_J", - "FD_P", - ]: - msg = "Censoring requested, but with invalid threshold type." - raise ValueError(msg) - - if "value" not in threshold: - msg = "Censoring requested, but threshold not provided." 
- raise ValueError(msg) - - if threshold["type"] == "FD_J": - find_censors.inputs.fd_j_threshold = threshold["value"] - nuisance_wf.connect( - inputspec, "fd_j_file_path", find_censors, "fd_j_file_path" - ) - - if threshold["type"] == "FD_P": - find_censors.inputs.fd_p_threshold = threshold["value"] - nuisance_wf.connect( - inputspec, "fd_p_file_path", find_censors, "fd_p_file_path" - ) - - if threshold["type"] == "DVARS": - find_censors.inputs.dvars_threshold = threshold["value"] - nuisance_wf.connect( - inputspec, "dvars_file_path", find_censors, "dvars_file_path" - ) - - if ( - censor_selector.get("number_of_previous_trs_to_censor") - and censor_selector["method"] != "SpikeRegression" - ): - find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[ - "number_of_previous_trs_to_censor" - ] - - else: - find_censors.inputs.number_of_previous_trs_to_censor = 0 - - if ( - censor_selector.get("number_of_subsequent_trs_to_censor") - and censor_selector["method"] != "SpikeRegression" - ): - find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[ - "number_of_subsequent_trs_to_censor" + nuisance_wf.connect( + [ + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_j_file_path", "inputspec.fd_j_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_p_file_path", "inputspec.fd_p_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("dvars_file_path", "inputspec.dvars_file_path")], + ), ] - - else: - find_censors.inputs.number_of_subsequent_trs_to_censor = 0 + ) # Use 3dTproject to perform nuisance variable regression nuisance_regression = pe.Node( @@ -1764,7 +1832,12 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre "method" ].upper() - nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") + nuisance_wf.connect( + offending_timepoints_connector_wf, + "outputspec.out_file", + nuisance_regression, + "censor", + ) if nuisance_selectors.get("PolyOrt"): if not nuisance_selectors["PolyOrt"].get("degree"): From 14e6bbbf600260ce4514bd62a014274c91028c9a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 13 Feb 2025 12:56:13 -0500 Subject: [PATCH 183/507] Revert "Removed the erroneous connection that can sometimes lead to dropping TRs unnecessarily." This reverts commit e03349fbc7b9fc7fcb5077a2c032e9b3016bfe75. 
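
For reference while this branch is being reworked: the method-to-`-cenmode` mapping that the restored code implements is small enough to sketch as a pure function (illustrative names, not the exact C-PAC API):

```python
def to_cenmode(method: str) -> str:
    """Map a C-PAC censoring method to an AFNI 3dTProject -cenmode value.

    SpikeRegression never reaches this mapping: its censors are consumed
    while the regressors are built instead.
    """
    return "NTRP" if method == "Interpolate" else method.upper()


assert to_cenmode("Interpolate") == "NTRP"
assert to_cenmode("Kill") == "KILL"
assert to_cenmode("Zero") == "ZERO"
```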
--- CPAC/nuisance/nuisance.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 0aed17428a..57b61596b9 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1825,12 +1825,15 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre nuisance_regression.inputs.norm = False if nuisance_selectors.get("Censor"): - if nuisance_selectors["Censor"]["method"] == "Interpolate": - nuisance_regression.inputs.cenmode = "NTRP" + if nuisance_selectors["Censor"]["method"] == "SpikeRegression": + nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") else: - nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][ - "method" - ].upper() + if nuisance_selectors["Censor"]["method"] == "Interpolate": + nuisance_regression.inputs.cenmode = "NTRP" + else: + nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][ + "method" + ].upper() nuisance_wf.connect( offending_timepoints_connector_wf, From 8604bf4c073ef559234b6e5c43450b7dcdc90359 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 13 Feb 2025 13:18:15 -0500 Subject: [PATCH 184/507] =?UTF-8?q?=F0=9F=94=A8=20Fixing=20check=20for=20S?= =?UTF-8?q?pikeRegression?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/nuisance/nuisance.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 57b61596b9..ac5da07ac7 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1825,15 +1825,11 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre nuisance_regression.inputs.norm = False if nuisance_selectors.get("Censor"): - if nuisance_selectors["Censor"]["method"] == "SpikeRegression": - nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor") - else: - if nuisance_selectors["Censor"]["method"] == "Interpolate": - nuisance_regression.inputs.cenmode = "NTRP" - else: - nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][ - "method" - ].upper() + if nuisance_selectors["Censor"]["method"] != "SpikeRegression": + nuisance_regression.inputs.cenmode = ( + "NTRP" if nuisance_selectors["Censor"]["method"] == "Interpolate" + else nuisance_selectors["Censor"]["method"].upper() + ) nuisance_wf.connect( offending_timepoints_connector_wf, From 7530295d88cec47f90b56e1142f4d6b1dadb7be3 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 13 Feb 2025 14:29:18 -0500 Subject: [PATCH 185/507] =?UTF-8?q?=F0=9F=94=A8=20Added=20output=20connect?= =?UTF-8?q?ion=20for=20the=20censor=5Ffile=5Fpath=20that=20was=20missing?= =?UTF-8?q?=20in=20last=20commit=20and=20updated=20CHANGELOG?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 3 ++- CPAC/nuisance/nuisance.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 796d789ef6..42dfd66e69 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,13 +24,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. 
- Optional positional argument "cfg" to `create_lesion_preproc`. +- `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node. ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). -- Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`. +- Moved `find_censors node` inside `create_nuisance_regression_workflow` into its own function/subworkflow as `offending_timepoints_connector`. ### Fixed diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index ac5da07ac7..5f878ff29d 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1679,6 +1679,12 @@ def create_regressor_workflow( ), ] ) + nuisance_wf.connect( + offending_timepoints_connector_wf, + "outputspec.out_file", + build_nuisance_regressors, + "censor_file_path", + ) build_nuisance_regressors.inputs.selector = nuisance_selectors From 9d6d98e59db3d76964642e758947067234b068ff Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 13 Feb 2025 19:40:42 +0000 Subject: [PATCH 186/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CHANGELOG.md | 1 + CPAC/nuisance/nuisance.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 42dfd66e69..a8f1b33209 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). +- Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`. - Moved `find_censors node` inside `create_nuisance_regression_workflow` into its own function/subworkflow as `offending_timepoints_connector`. 
### Fixed diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 5f878ff29d..142ed2a19b 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1833,7 +1833,8 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre if nuisance_selectors.get("Censor"): if nuisance_selectors["Censor"]["method"] != "SpikeRegression": nuisance_regression.inputs.cenmode = ( - "NTRP" if nuisance_selectors["Censor"]["method"] == "Interpolate" + "NTRP" + if nuisance_selectors["Censor"]["method"] == "Interpolate" else nuisance_selectors["Censor"]["method"].upper() ) From accba633ea3b53d714343ec9acca17e1273572c9 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 13 Feb 2025 15:35:45 -0500 Subject: [PATCH 187/507] added missing tab that caused the crash in ci --- CPAC/nuisance/nuisance.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 142ed2a19b..a4851d5d28 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1679,12 +1679,13 @@ def create_regressor_workflow( ), ] ) - nuisance_wf.connect( - offending_timepoints_connector_wf, - "outputspec.out_file", - build_nuisance_regressors, - "censor_file_path", - ) + + nuisance_wf.connect( + offending_timepoints_connector_wf, + "outputspec.out_file", + build_nuisance_regressors, + "censor_file_path", + ) build_nuisance_regressors.inputs.selector = nuisance_selectors From 3373acd3a0fabf8dadd4ba3d66c1d353843d1604 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 13 Feb 2025 20:36:04 +0000 Subject: [PATCH 188/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/nuisance/nuisance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index a4851d5d28..c4a9370038 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1679,7 +1679,7 @@ def create_regressor_workflow( ), ] ) - + nuisance_wf.connect( offending_timepoints_connector_wf, "outputspec.out_file", From fb91f90c14683930ef515d7d943d86f26739eb49 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 24 Feb 2025 14:02:24 -0500 Subject: [PATCH 189/507] :arrow_up: Bump cryptography from 42.0.3 to 44.0.1 Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.3 to 44.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.3...44.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f54d4ba8bc..2e3c085220 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,7 +40,7 @@ voluptuous==0.13.1 # the below are pinned specifically to match what the FSL installer installs botocore==1.31.4 charset-normalizer==3.1.0 -cryptography==43.0.1 +cryptography==44.0.1 h5py==3.8.0 importlib-metadata==6.8.0 lxml==4.9.2 From 1771f7be37619bee50d9ac548ccdacc5f5bfe04c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 26 Feb 2025 14:26:18 -0500 Subject: [PATCH 190/507] :bug: Restore outputs removed from "freesurfer_abcd_preproc" to "freesurfer_reconall" --- CPAC/anat_preproc/anat_preproc.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index eda6ecf62a..215e495449 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2904,6 +2904,18 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): "pipeline-fs_brainmask", "pipeline-fs_wmparc", "pipeline-fs_T1", + *[ + f"pipeline-fs_hemi-{hemi}_{entity}" + for hemi in ["L", "R"] + for entity in [ + "desc-surface_curv", + *[ + f"desc-surfaceMesh_{_}" + for _ in ["pial", "smoothwm", "sphere", "white"] + ], + *[f"desc-surfaceMap_{_}" for _ in ["sulc", "thickness", "volume"]], + ] + ], *freesurfer_abcd_preproc.outputs, # we're grabbing the postproc outputs and appending them to # the reconall outputs From 43ed9ffc622590cd297e0907f65db1b8e195fdc5 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 26 Feb 2025 15:17:47 -0500 Subject: [PATCH 191/507] :alien: Replace broken setup-conda action --- .github/workflows/smoke_test_participant.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 347a489d07..f773ff7e69 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -145,7 +145,7 @@ jobs: echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV cat $GITHUB_ENV - name: setup-conda - uses: s-weigand/setup-conda@v1.2.3 + uses: conda-incubator/setup-miniconda@v3.1.1 - name: Set up datalad-OSF run: | git config --global user.email "CMI_CPAC_Support@childmind.org" @@ -203,7 +203,12 @@ jobs: echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV cat $GITHUB_ENV - name: setup-conda - uses: s-weigand/setup-conda@v1.2.3 + uses: conda-incubator/setup-miniconda@v3.1.1 + with: + activate-environment: datalad-osf + channels: conda-forge + conda-remove-defaults: "true" + python-version: 3.12 - name: Set up datalad-OSF run: | git config --global user.email "CMI_CPAC_Support@childmind.org" From ea3a304146422cfa00781eaa32ae7ece31bc5845 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Feb 2025 15:03:30 -0500 Subject: [PATCH 192/507] :loud_sound: Increase logging for `datalad clone` --- .github/workflows/smoke_test_participant.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index f773ff7e69..73ea99a887 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -217,6 +217,8 @@ jobs: pip install datalad-osf - name: Get rodent test data run: | + 
export GIT_TRACE=1 + export DATALAD_LOG_LEVEL=DEBUG datalad clone osf://uya3r test-data - name: Run rodent smoke test run: | From de9f5303cdc0abd40eb9e85b99156531ed97f902 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Feb 2025 16:02:53 -0500 Subject: [PATCH 193/507] :construction_worker: Install `git-annex` before `datalad-osf` [skip ci] --- .github/workflows/smoke_test_participant.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml index 73ea99a887..e41292535f 100644 --- a/.github/workflows/smoke_test_participant.yml +++ b/.github/workflows/smoke_test_participant.yml @@ -148,6 +148,7 @@ jobs: uses: conda-incubator/setup-miniconda@v3.1.1 - name: Set up datalad-OSF run: | + sudo apt-get update && sudo apt-get install -y git-annex git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (Machine User)" yes | conda install -c conda-forge datalad @@ -211,6 +212,7 @@ jobs: python-version: 3.12 - name: Set up datalad-OSF run: | + sudo apt-get update && sudo apt-get install -y git-annex git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (Machine User)" yes | conda install -c conda-forge datalad From f86d9304b29417ddb7b8cdef5c9c554bef6a335a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 6 Mar 2025 15:08:46 -0500 Subject: [PATCH 194/507] :truck: Move automated regtest working directory [run reg-suite lite] --- .github/workflows/regtest.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/regtest.yaml b/.github/workflows/regtest.yaml index 92a09a924c..84a58b6dca 100644 --- a/.github/workflows/regtest.yaml +++ b/.github/workflows/regtest.yaml @@ -95,9 +95,10 @@ jobs: # If the package is not installed, install it python3 -m pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip" fi - mkdir -p ./logs/${{ github.sha }} + _CPAC_SLURM_TESTING_WD="${{ env.SSH_WORK_DIR }}/automatic_tests/${{ inputs.test_mode }}/${{ github.sha }}" + mkdir -p "${_CPAC_SLURM_TESTING_WD}" sbatch cpac-slurm-status launch \ - --wd="${{ env.SSH_WORK_DIR }}/logs/${{ github.sha }}" \ + --wd="${_CPAC_SLURM_TESTING_WD}" \ --comparison-path="${{ env.COMPARISON_PATH }}" \ --dashboard-repo="${{ env.DASHBOARD_REPO}}" \ --home-dir="${{ env.SSH_WORK_DIR }}" \ From 0465162fe5767b0887e8497a0095f8b2515c3e36 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 6 Mar 2025 21:27:51 +0000 Subject: [PATCH 195/507] Offending timepoints call should only happen before nuisance for spike regression. 
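
Of the four censoring methods, only SpikeRegression consumes the censor file while the regressors are being built: each flagged TR becomes a one-hot column appended to the design matrix, rather than being handed to `3dTProject`. A hypothetical, self-contained illustration of that construction (toy data; variable names are not the pipeline's):

```python
import numpy as np

censor = np.array([1, 0, 1, 1, 0, 1])  # 0 marks an offending TR
spike_idx = np.flatnonzero(censor == 0)

# One column per censored TR: 1.0 at that TR, 0.0 everywhere else.
spikes = np.zeros((censor.size, spike_idx.size))
spikes[spike_idx, np.arange(spike_idx.size)] = 1.0

# These columns join the nuisance regressors; Kill/Zero/Interpolate
# instead pass the censor file straight to 3dTProject.
assert spikes.sum(axis=0).tolist() == [1.0, 1.0]
```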
--- CPAC/nuisance/nuisance.py | 69 ++++++++++++++++++------------------ CPAC/nuisance/utils/utils.py | 2 +- 2 files changed, 36 insertions(+), 35 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index c4a9370038..1806d8523a 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1657,35 +1657,36 @@ def create_regressor_workflow( ) if nuisance_selectors.get("Censor"): - offending_timepoints_connector_wf = offending_timepoints_connector( - nuisance_selectors - ) - nuisance_wf.connect( - [ - ( - inputspec, - offending_timepoints_connector_wf, - [("fd_j_file_path", "inputspec.fd_j_file_path")], - ), - ( - inputspec, - offending_timepoints_connector_wf, - [("fd_p_file_path", "inputspec.fd_p_file_path")], - ), - ( - inputspec, - offending_timepoints_connector_wf, - [("dvars_file_path", "inputspec.dvars_file_path")], - ), - ] - ) + if nuisance_selectors["Censor"]["method"] == "SpikeRegression": + offending_timepoints_connector_wf = offending_timepoints_connector( + nuisance_selectors + ) + nuisance_wf.connect( + [ + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_j_file_path", "inputspec.fd_j_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("fd_p_file_path", "inputspec.fd_p_file_path")], + ), + ( + inputspec, + offending_timepoints_connector_wf, + [("dvars_file_path", "inputspec.dvars_file_path")], + ), + ] + ) - nuisance_wf.connect( - offending_timepoints_connector_wf, - "outputspec.out_file", - build_nuisance_regressors, - "censor_file_path", - ) + nuisance_wf.connect( + offending_timepoints_connector_wf, + "outputspec.out_file", + build_nuisance_regressors, + "censor_file_path", + ) build_nuisance_regressors.inputs.selector = nuisance_selectors @@ -1839,12 +1840,12 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre else nuisance_selectors["Censor"]["method"].upper() ) - nuisance_wf.connect( - offending_timepoints_connector_wf, - "outputspec.out_file", - nuisance_regression, - "censor", - ) + nuisance_wf.connect( + offending_timepoints_connector_wf, + "outputspec.out_file", + nuisance_regression, + "censor", + ) if nuisance_selectors.get("PolyOrt"): if not nuisance_selectors["PolyOrt"].get("degree"): diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py index db6667dcb3..9067b72ab6 100644 --- a/CPAC/nuisance/utils/utils.py +++ b/CPAC/nuisance/utils/utils.py @@ -139,7 +139,7 @@ def find_offending_time_points( censor_vector[extended_censors] = 0 out_file_path = os.path.join(os.getcwd(), "censors.tsv") - np.savetxt(out_file_path, censor_vector, fmt="%d", header="censor", comments="") + np.savetxt(out_file_path, censor_vector, fmt="%d", comments="") return out_file_path From a8adeb48480c8b3176e581852100066dda8ad336 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:46:48 -0500 Subject: [PATCH 196/507] Update CPAC/pipeline/schema.py Co-authored-by: Jon Cluce --- CPAC/pipeline/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 825b682fb9..0966d9c44f 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1395,7 +1395,7 @@ def schema(config_dict): if ( overwrite["run"] - and overwrite["using"] + and "ANTS" not in partially_validated["registration_workflows"]["anatomical_registration"][ "registration" ]["using"] From 8cf804f06ed11b885be4cfd4e85d4fc471b6d1e4 Mon Sep 17 00:00:00 2001 From: birajstha 
<111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:46:56 -0500 Subject: [PATCH 197/507] Update CPAC/pipeline/test/test_schema_validation.py Co-authored-by: Jon Cluce --- CPAC/pipeline/test/test_schema_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 2b680ddd5a..02362ed0f2 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -136,7 +136,7 @@ def test_overwrite_transform(registration_using): } } } - if "FSL" not in registration_using: + if "ANTS" in registration_using: Configuration(d) # validates without exception else: with pytest.raises(ExclusiveInvalid) as e: From 8c4a7affc56b28cb6800bcad15a4a1f1487f6a49 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:47:02 -0500 Subject: [PATCH 198/507] Update CPAC/registration/registration.py Co-authored-by: Jon Cluce --- CPAC/registration/registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index f7f429aeda..abd58e9bc0 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,7 +24,6 @@ from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc -from CPAC.error_handler.exceptions import NodeBlockError from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock From 76bd2ae9ba12ffcbcf8a86cd3befb5566f29e7f0 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:47:08 -0500 Subject: [PATCH 199/507] Update CPAC/registration/registration.py Co-authored-by: Jon Cluce --- CPAC/registration/registration.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index abd58e9bc0..754071f199 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3081,9 +3081,6 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None else: outputs = {} - raise NodeBlockError( - "Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL." - ) return (wf, outputs) From ba2cff458f8d5ccc3ee304a71b9dacb96ee8bd7d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 22:47:15 +0000 Subject: [PATCH 200/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/schema.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 0966d9c44f..693ffbaf77 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1395,10 +1395,10 @@ def schema(config_dict): if ( overwrite["run"] - and "ANTS" not - in partially_validated["registration_workflows"]["anatomical_registration"][ - "registration" - ]["using"] + and "ANTS" + not in partially_validated["registration_workflows"][ + "anatomical_registration" + ]["registration"]["using"] ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! 
" From 82e5b93a5d9b3cd8f13b59bf295de4ceb6de002d Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 6 Mar 2025 17:50:34 -0500 Subject: [PATCH 201/507] revert back adding error handler dir --- CPAC/error_handler/__init__.py | 0 CPAC/error_handler/exceptions.py | 14 -------------- 2 files changed, 14 deletions(-) delete mode 100644 CPAC/error_handler/__init__.py delete mode 100644 CPAC/error_handler/exceptions.py diff --git a/CPAC/error_handler/__init__.py b/CPAC/error_handler/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py deleted file mode 100644 index 5c17a4028f..0000000000 --- a/CPAC/error_handler/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class SchemaError(Exception): - """Exception raised for errors in the schema.""" - - def __init__(self, message): - self.message = message - super().__init__(self.message) - - -class NodeBlockError(Exception): - """Exception raised for errors in the node block.""" - - def __init__(self, message): - self.message = message - super().__init__(self.message) From 22f3a86758f643f99f1f366a4ac62b4139b07b27 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 6 Mar 2025 18:20:43 -0500 Subject: [PATCH 202/507] adding to changelog --- CHANGELOG.md | 1 + CPAC/pipeline/schema.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..14d8cbf807 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- Allow enabling `overwrite_transform` only when the registration method is `ANTS`. ### Changed diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 693ffbaf77..56a4c54fc6 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1402,7 +1402,7 @@ def schema(config_dict): ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! " - "No need to overwrite transform with the same registration method." + "No need to overwrite transform with the same registration method. Please turn it off or use a different registration method." 
        )
    except KeyError:
        pass

From a91c8542bf49f2320a997f85ecd6dec0c29f5685 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 7 Mar 2025 11:39:45 -0500
Subject: [PATCH 203/507] fixing test for overwrite transform

---
 CPAC/pipeline/test/test_schema_validation.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 02362ed0f2..18503efeb9 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -128,6 +128,7 @@ def test_pipeline_name():
 def test_overwrite_transform(registration_using):
     """Test that if overwrite transform method is already a registration method."""
     # pylint: disable=invalid-name
+
     d = {
         "registration_workflows": {
             "anatomical_registration": {
@@ -141,6 +142,4 @@ def test_overwrite_transform(registration_using):
     else:
         with pytest.raises(ExclusiveInvalid) as e:
             Configuration(d)
-    assert "Overwrite transform is found same as the registration method" in str(
-        e.value
-    )
+    assert "Overwrite transform is found same" in str(e.value)

From 01cc82227eb274688dab5151bd21699fed08945e Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 7 Mar 2025 11:39:55 -0500
Subject: [PATCH 204/507] fixing test for overwrite transform

---
 CPAC/pipeline/test/test_schema_validation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 18503efeb9..7fd1d86103 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -128,7 +128,7 @@ def test_pipeline_name():
 def test_overwrite_transform(registration_using):
     """Test that if overwrite transform method is already a registration method."""
     # pylint: disable=invalid-name
-
+
     d = {
         "registration_workflows": {
             "anatomical_registration": {

From 5469237bc174c4c7b0064f073c3784d21e145ee3 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Mon, 10 Mar 2025 12:55:42 -0400
Subject: [PATCH 205/507] :construction_worker: Add test mode positional argument for integration test launch command [run reg-suite lite]

---
 .github/workflows/regtest.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/regtest.yaml b/.github/workflows/regtest.yaml
index 84a58b6dca..04c6b14d15 100644
--- a/.github/workflows/regtest.yaml
+++ b/.github/workflows/regtest.yaml
@@ -97,7 +97,7 @@ jobs:
           fi
           _CPAC_SLURM_TESTING_WD="${{ env.SSH_WORK_DIR }}/automatic_tests/${{ inputs.test_mode }}/${{ github.sha }}"
           mkdir -p "${_CPAC_SLURM_TESTING_WD}"
-          sbatch cpac-slurm-status launch \
+          sbatch cpac-slurm-status ${{ inputs.test_mode }} launch \
             --wd="${_CPAC_SLURM_TESTING_WD}" \
             --comparison-path="${{ env.COMPARISON_PATH }}" \
             --dashboard-repo="${{ env.DASHBOARD_REPO}}" \
             --home-dir="${{ env.SSH_WORK_DIR }}" \

From eff8c4521f8b29a5aec57a6d24ffa10b791df15e Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Mon, 10 Mar 2025 14:44:57 -0400
Subject: [PATCH 206/507] :alien: Update Dockerfile `LABEL`s re: deprecated `LegacyKeyValueFormat`

Ref https://docs.docker.com/reference/build-checks/legacy-key-value-format/
---
 .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 4 ++--
 .github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile | 4 ++--
 .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile | 4 ++--
 .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile | 4 ++--
 .github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile | 4 ++--
.github/Dockerfiles/FSL.data.Dockerfile | 4 ++-- .../FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile | 2 +- .github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile | 4 ++-- .github/Dockerfiles/base-lite.Dockerfile | 4 ++-- .github/Dockerfiles/base-standard.Dockerfile | 4 ++-- .github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile | 4 ++-- .../connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile | 4 ++-- Dockerfile | 4 ++-- variant-lite.Dockerfile | 4 ++-- 16 files changed, 31 insertions(+), 31 deletions(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 654146ec78..86fa68315b 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -155,9 +155,9 @@ RUN apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ AFNI ${AFNI_VERSION} (${VERSION_NAME}) stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ COPY --from=AFNI /lib64/ld* /lib64/ diff --git a/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile b/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile index 03dd017b84..67cb8fdfad 100644 --- a/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile +++ b/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile @@ -30,8 +30,8 @@ RUN curl -sL https://github.com/ANTsX/ANTs/releases/download/v2.4.3/ants-2.4.3-u # Only keep what we need FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ ANTs 2.4.3 stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=ANTs /usr/lib/ants/ /usr/lib/ants/ COPY --from=ANTs /ants_template/ /ants_template/ diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 2fa4ae4a23..1debc54c4a 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -15,8 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 -LABEL org.opencontainers.image.description "Full C-PAC image" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.description="Full C-PAC image" +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # install C-PAC diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index 8e76675dc4..20561f09aa 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -15,8 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 -LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # install C-PAC diff --git a/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile b/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile index e4ff0f9b25..112b0feda1 100644 --- a/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile +++ b/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile @@ -101,9 +101,9 @@ ENTRYPOINT ["/bin/bash"] # # Only keep what we need FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ FSL 6.0.6.5 stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=FSL /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu COPY --from=FSL /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu COPY --from=FSL /usr/bin /usr/bin diff --git a/.github/Dockerfiles/FSL.data.Dockerfile b/.github/Dockerfiles/FSL.data.Dockerfile index c7e0b593e4..816b5e1547 100644 --- a/.github/Dockerfiles/FSL.data.Dockerfile +++ b/.github/Dockerfiles/FSL.data.Dockerfile @@ -18,9 +18,9 @@ RUN mkdir -p /fsl_data/atlases/HarvardOxford fsl_data/standard/tissuepriors \ && chmod -R ugo+r /fsl_data/atlases FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ FSL data" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=FSL /fsl_data/standard fsl_data/standard COPY --from=FSL /fsl_data/atlases fsl_data/atlases diff --git a/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile b/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile index 811d20f617..ae6eac7548 100644 --- a/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile +++ b/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile @@ -32,7 +32,7 @@ RUN apt-get clean && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT 
INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ FreeSurfer 6.0.0-min stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=FreeSurfer6 /usr/lib/freesurfer/ /usr/lib/freesurfer/ diff --git a/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile b/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile index 2759c529eb..cc188c9aa2 100644 --- a/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile +++ b/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile @@ -24,6 +24,6 @@ USER c-pac_user # Only keep what we need FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ ICA-AROMA 0.4.4-beta stage" COPY --from=ICA-AROMA /opt/ICA-AROMA/ /opt/ICA-AROMA/ diff --git a/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile b/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile index 3017126770..5b8a653751 100644 --- a/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile +++ b/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile @@ -26,9 +26,9 @@ RUN apt-get update \ # use neurodebian runtime as parent image FROM neurodebian:jammy-non-free -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Ubuntu Jammy base image" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC ARG BIDS_VALIDATOR_VERSION=1.14.6 \ DEBIAN_FRONTEND=noninteractive ENV TZ=America/New_York \ diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index 25c494942f..e5c85d258a 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -22,9 +22,9 @@ FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy as ICA-AROMA FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard and lite images" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # Installing connectome-workbench diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index de7d3841e2..0ba2cd5158 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -17,9 +17,9 @@ FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy as FreeSurfer FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard images" -LABEL org.opencontainers.image.source 
https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # Installing FreeSurfer diff --git a/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile b/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile index 2c1a7f1d87..9fbcdd2386 100644 --- a/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile +++ b/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile @@ -36,7 +36,7 @@ RUN apt-get clean && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* FROM scratch -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ c3d 1.0.0 (Jammy) stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=c3d /opt/c3d/ /opt/c3d/ diff --git a/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile b/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile index 2c958fd5d5..1932efbc8f 100644 --- a/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile +++ b/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile @@ -24,9 +24,9 @@ RUN apt-get update \ USER c-pac_user # FROM scratch -# LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +# LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ # connectome-workbench 1.5.0 stage" -# LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +# LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC # COPY --from=base /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2 /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2 # COPY --from=base /lib/x86_64-linux-gnu/libGL.so.1 /lib/x86_64-linux-gnu/libGL.so.1 # COPY --from=base /lib/x86_64-linux-gnu/libGLU.so.1 /lib/x86_64-linux-gnu/libGLU.so.1 diff --git a/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile b/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile index 2f64e0ae6f..519093d3bd 100644 --- a/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile +++ b/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile @@ -1,8 +1,8 @@ # using neurodebian runtime as parent image FROM neurodebian:bionic-non-free -LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ +LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ neuroparc v1.0-human stage" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC ARG DEBIAN_FRONTEND=noninteractive diff --git a/Dockerfile b/Dockerfile index 2fa4ae4a23..1debc54c4a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,8 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 -LABEL org.opencontainers.image.description "Full C-PAC image" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.description="Full C-PAC image" +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # install C-PAC diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index 8e76675dc4..20561f09aa 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -15,8 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 -LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer" -LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC +LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" +LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root # install C-PAC From 2388a1b00507d667a36ed86d1225829485ee70d1 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Mon, 10 Mar 2025 22:14:15 -0500 Subject: [PATCH 207/507] Update CPAC/pipeline/schema.py Co-authored-by: Jon Cluce --- CPAC/pipeline/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 56a4c54fc6..989ccb1ea0 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1401,7 +1401,7 @@ def schema(config_dict): ]["registration"]["using"] ): raise ExclusiveInvalid( - "[!] Overwrite transform is found same as the anatomical registration method! " + "[!] Overwrite transform method is the same as the anatomical registration method! " "No need to overwrite transform with the same registration method. Please turn it off or use a different registration method." 
) except KeyError: From 9396a0c2b47a41df5d4c021647e7378d13bf5b38 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Mon, 10 Mar 2025 22:14:31 -0500 Subject: [PATCH 208/507] Update CPAC/pipeline/test/test_schema_validation.py Co-authored-by: Jon Cluce --- CPAC/pipeline/test/test_schema_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 7fd1d86103..0b5e20da3f 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -142,4 +142,4 @@ def test_overwrite_transform(registration_using): else: with pytest.raises(ExclusiveInvalid) as e: Configuration(d) - assert "Overwrite transform is found same" in str(e.value) + assert "Overwrite transform method is the same" in str(e.value) From 00709226f10b5be65d0f83e8f2fccb7dada7f5b7 Mon Sep 17 00:00:00 2001 From: Steve Giavasis Date: Fri, 14 Mar 2025 15:42:10 -0400 Subject: [PATCH 209/507] Remove validation to move to schema Co-authored-by: Jon Cluce --- CPAC/nuisance/nuisance.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 1806d8523a..86afe43e0d 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -517,18 +517,7 @@ def offending_timepoints_connector( name="outputspec", ) - censor_methods = ["Kill", "Zero", "Interpolate", "SpikeRegression"] - censor_selector = nuisance_selectors.get("Censor") - if censor_selector is None or censor_selector.get("method") not in censor_methods: - msg = ( - "Improper censoring method specified ({0}), " - "should be one of {1}.".format( - censor_selector.get("method", None) if censor_selector else None, - censor_methods, - ) - ) - raise ValueError(msg) find_censors = pe.Node( Function( From 3611ddc402e44c28dc5845cdbfeea915d3dd4379 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Fri, 14 Mar 2025 19:55:18 +0000 Subject: [PATCH 210/507] Moved Censoring "method" input validation to the schema as per @shnizzedy's suggestion. 
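
Validating at the schema level means a bad `method` value is caught when the pipeline config is parsed, instead of partway through building the nuisance workflow. A standalone sketch of the voluptuous pattern used here (a reduced schema, not C-PAC's full one):

```python
from voluptuous import In, Invalid, Required, Schema

censor_schema = Schema(
    {Required("method"): In(["Kill", "Zero", "Interpolate", "SpikeRegression"])}
)

censor_schema({"method": "Interpolate"})  # validates cleanly
try:
    censor_schema({"method": "Scrub"})
except Invalid as error:
    print(error)  # voluptuous reports the offending key and its allowed values
```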
--- CPAC/pipeline/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index cdb72747d0..88aa91a5b6 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1036,7 +1036,7 @@ def sanitize(filename): { "Name": Required(str), "Censor": { - "method": str, + "method": In(["Kill", "Zero", "Interpolate", "SpikeRegression"]), "thresholds": [ { "type": str, From abeb1cbcc3e19d85c84cb990d024e6d9c9486f74 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 19:57:00 +0000 Subject: [PATCH 211/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/schema.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 88aa91a5b6..459c5e22ef 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1036,7 +1036,14 @@ def sanitize(filename): { "Name": Required(str), "Censor": { - "method": In(["Kill", "Zero", "Interpolate", "SpikeRegression"]), + "method": In( + [ + "Kill", + "Zero", + "Interpolate", + "SpikeRegression", + ] + ), "thresholds": [ { "type": str, From 335e2dbb3a4b36d819b53ef5035253e643faea20 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 14 Mar 2025 16:46:49 -0400 Subject: [PATCH 212/507] and now with noverify --- CPAC/func_preproc/func_preproc.py | 85 ++++++++++++++++++++----------- CPAC/registration/registration.py | 20 ++++++-- 2 files changed, 73 insertions(+), 32 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index e47b768846..ac3335d8de 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -501,7 +501,28 @@ def get_idx(in_files, stop_idx=None, start_idx=None): return stopidx, startidx -def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): +def fsl_afni_subworkflow(cfg, pipe_num, opt=None): + wf = pe.Workflow(name=f"fsl_afni_subworkflow_{pipe_num}") + + inputNode = pe.Node( + util.IdentityInterface( + fields=[ + "FSL-AFNI-bold-ref", + "FSL-AFNI-brain-mask", + "FSL-AFNI-brain-probseg", + "motion-basefile", + ] + ), + name="inputspec", + ) + + outputNode = pe.Node( + util.IdentityInterface( + fields=["space-bold_desc-brain_mask", "desc-unifized_bold"] + ), + name="outputspec", + ) + # Initialize transforms with antsAI init_aff = pe.Node( AI( @@ -515,11 +536,6 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): name=f"init_aff_{pipe_num}", n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data("FSL-AFNI-bold-ref") - wf.connect(node, out, init_aff, "fixed_image") - - node, out = strat_pool.get_data("FSL-AFNI-brain-mask") - wf.connect(node, out, init_aff, "fixed_image_mask") init_aff.inputs.search_grid = (40, (0, 40, 40)) @@ -548,9 +564,6 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data("FSL-AFNI-bold-ref") - wf.connect(node, out, norm, "fixed_image") - map_brainmask = pe.Node( ants.ApplyTransforms( interpolation="BSpline", @@ -559,10 +572,6 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): name=f"map_brainmask_{pipe_num}", ) - # Use the higher resolution and probseg for numerical stability in rounding - node, out = 
strat_pool.get_data("FSL-AFNI-brain-probseg") - wf.connect(node, out, map_brainmask, "input_image") - binarize_mask = pe.Node( interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}" ) @@ -641,14 +650,17 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): # Compute masked brain apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}") - node, out = strat_pool.get_data(["motion-basefile"]) - wf.connect( [ - (node, init_aff, [(out, "moving_image")]), - (node, map_brainmask, [(out, "reference_image")]), - (node, norm, [(out, "moving_image")]), + (inputNode, init_aff, [("FSL-AFNI-bold-ref", "fixed_image")]), + (inputNode, init_aff, [("FSL-AFNI-brain-mask", "fixed_image_mask")]), + (inputNode, init_aff, [("motion-basefile", "moving_image")]), (init_aff, norm, [("output_transform", "initial_moving_transform")]), + (inputNode, norm, [("FSL-AFNI-bold-ref", "fixed_image")]), + (inputNode, norm, [("motion-basefile", "moving_image")]), + # Use the higher resolution and probseg for numerical stability in rounding + (inputNode, map_brainmask, [("FSL-AFNI-brain-probseg", "input_image")]), + (inputNode, map_brainmask, [("motion-basefile", "reference_image")]), ( norm, map_brainmask, @@ -661,9 +673,13 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): (binarize_mask, pre_dilate, [("out_file", "in_file")]), (pre_dilate, print_header, [("out_file", "image")]), (print_header, set_direction, [("header", "direction")]), - (node, set_direction, [(out, "infile"), (out, "outfile")]), + ( + inputNode, + set_direction, + [("motion-basefile", "infile"), ("motion-basefile", "outfile")], + ), (set_direction, n4_correct, [("outfile", "mask_image")]), - (node, n4_correct, [(out, "input_image")]), + (inputNode, n4_correct, [("motion-basefile", "input_image")]), (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]), (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]), (bet_dilate, bet_mask, [("out_file", "mask_file")]), @@ -674,15 +690,12 @@ def fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt=None): (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]), (unifize, apply_mask, [("out_file", "in_file")]), (combine_masks, apply_mask, [("out_file", "mask_file")]), + (combine_masks, outputNode, [("out_file", "space-bold_desc-brain_mask")]), + (apply_mask, outputNode, [("out_file", "desc-unifized_bold")]), ] ) - outputs = { - "fMRIprep_brain_mask": (combine_masks, "out_file"), - "desc-unifized_bold": (apply_mask, "out_file"), - } - - return (wf, outputs) + return wf @nodeblock( @@ -1233,10 +1246,24 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # Modifications copyright (C) 2021 - 2024 C-PAC Developers - wf, outputs = fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num, opt) + fsl_afni_wf = fsl_afni_subworkflow(cfg, pipe_num, opt) - # both masks are available, but they are the same in this nodeblock - outputs["space-bold_desc-brain_mask"] = outputs["fMRIprep_brain_mask"] + for key in [ + "FSL-AFNI-bold-ref", + "FSL-AFNI-brain-mask", + "FSL-AFNI-brain-probseg", + "motion-basefile", + ]: + node, out = strat_pool.get_data(key) + wf.connect(node, out, fsl_afni_wf, f"inputspec.{key}") + + outputs = { + "desc-unifized_bold": (fsl_afni_wf, "outputspec.desc-unifized_bold"), + "space-bold_desc-brain_mask": ( + fsl_afni_wf, + "outputspec.space-bold_desc-brain_mask", + ), + } return (wf, outputs) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 
a557725fc7..8c4a20e605 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3224,15 +3224,29 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): "FSL-AFNI-bold-ref", "FSL-AFNI-brain-mask", "FSL-AFNI-brain-probseg", + "desc-unifized_bold", ], - outputs=["sbref", "desc-unifized_bold", "fMRIprep_brain_mask"], + outputs=["sbref"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): """Generate fMRIPrep-style single-band reference for coregistration.""" + outputs = {} + if not strat_pool.check_rpool("desc-unifized_bold"): - wf, outputs = fsl_afni_subworkflow(wf, cfg, strat_pool, pipe_num) + fsl_afni_wf = fsl_afni_subworkflow(cfg, pipe_num, opt) - outputs["sbref"] = outputs["desc-unifized_bold"] + for key in [ + "FSL-AFNI-bold-ref", + "FSL-AFNI-brain-mask", + "FSL-AFNI-brain-probseg", + "motion-basefile", + ]: + node, out = strat_pool.get_data(key) + wf.connect(node, out, fsl_afni_wf, f"inputspec.{key}") + + outputs["sbref"] = (fsl_afni_wf, "outputspec.desc-unifized_bold") + else: + outputs["sbref"] = strat_pool.get_data("desc-unifized_bold") return (wf, outputs) From 77f4769343b96301fb2bc7148ad3d4cc4ec39e2a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 14 Mar 2025 19:08:51 -0400 Subject: [PATCH 213/507] :construction_worker: :loud_sound: Log `git config --global url` --- .circleci/main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/main.yml b/.circleci/main.yml index f936f9230d..14e144a0d0 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -117,6 +117,9 @@ commands: steps: - create-docker-test-container: coverage-file: .coverage.docker${VARIANT} + - run: + name: Check Git URL Rewrite Config + command: git config --global --get-regexp url - run: name: Running pytest in Docker image command: docker exec --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh From 740ad1dc33e6a648c9ee69525aa64d441c34e326 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sat, 15 Mar 2025 14:25:01 -0400 Subject: [PATCH 214/507] :construction_worker: :whale: :wrench: :octocat: :lock: Don't force SSH when `git clone`ing test data in test image container --- .circleci/main.yml | 7 ------- dev/circleci_data/test_in_image.sh | 3 +++ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index 14e144a0d0..4dfc06f738 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -45,10 +45,6 @@ commands: - run: name: "Configuring git user" command: | - sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13 - curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - - sudo apt-get update - sudo apt-get install git openssh-client -y git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (machine user) @ CircleCI" create-docker-test-container: @@ -117,9 +113,6 @@ commands: steps: - create-docker-test-container: coverage-file: .coverage.docker${VARIANT} - - run: - name: Check Git URL Rewrite Config - command: git config --global --get-regexp url - run: name: Running pytest in Docker image command: docker exec --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh diff --git a/dev/circleci_data/test_in_image.sh b/dev/circleci_data/test_in_image.sh index b62de84994..d03b6e8015 100755 --- a/dev/circleci_data/test_in_image.sh +++ b/dev/circleci_data/test_in_image.sh @@ -1,5 +1,8 @@ export PATH=$PATH:/home/$(whoami)/.local/bin +# don't 
force SSH for git clones in testing image
+git config --global --unset url.ssh://git@github.com.insteadof
+
 # install testing requirements
 pip install -r /code/dev/circleci_data/requirements.txt
 

From aecf98a6e642423fefefa3aa3f28932b1349b8c5 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Sat, 15 Mar 2025 14:40:20 -0400
Subject: [PATCH 215/507] :lock: :alien: Remove compromised GitHub Action

---
 .github/workflows/on_push.yml | 17 ++++-------------
 1 file changed, 4 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml
index c584dd14f5..741ac4e86a 100644
--- a/.github/workflows/on_push.yml
+++ b/.github/workflows/on_push.yml
@@ -35,13 +35,6 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 2
-      - name: Get changed files since last commit
-        uses: tj-actions/changed-files@v45.0.7
-        id: changed-files
-        with:
-          since_last_remote_commit: "true"
-          files: .github/Dockerfiles/*
-          json: "true"
       - name: Determine stages to rebuild
         env:
           MESSAGE: ${{ github.event.head_commit.message }}
@@ -49,14 +42,12 @@
         run: |
           # initialize phase arrays
           declare -a PHASE_ONE PHASE_TWO PHASE_THREE REBUILD_PHASE_ONE REBUILD_PHASE_TWO REBUILD_PHASE_THREE
-          # turn JSON array into BASH array
-          CHANGED_FILES=( $(echo ${{ steps.changed-files.outputs.all_changed_files }} | sed -e 's/\[//g' -e 's/\]//g' -e 's/\,/ /g') )
           # loop through stages to maybe rebuild
           for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_one.txt)
           do
             PHASE_ONE+=($STAGE)
             # check commit message for [rebuild STAGE] or if STAGE has changed
-            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]]
             then
               REBUILD_PHASE_ONE+=($STAGE)
             fi
@@ -64,7 +55,7 @@
           for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_two.txt)
           do
             PHASE_TWO+=($STAGE)
-            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]]
             then
               REBUILD_PHASE_TWO+=($STAGE)
             fi
@@ -72,14 +63,14 @@
           for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_three.txt)
           do
             PHASE_THREE+=($STAGE)
-            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+            if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-${STAGE}]"* ]]
             then
               REBUILD_PHASE_THREE+=($STAGE)
             fi
           done
           # add base stages based on their dependencies
           BASES=("${PHASE_THREE[@]}" standard)
-          if [[ "${MESSAGE}" == *"[rebuild standard]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-standard]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " standard " ]]
+          if [[ "${MESSAGE}" == *"[rebuild standard]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-standard]"* ]]
           then
             REBUILD_PHASE_THREE+=(standard)
           fi

From 340f6139c46c11425ae0f7fe99f36f8b4ebc9a1d Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Sat, 15 Mar 2025 14:44:20 -0400
Subject: [PATCH 216/507] :memo: Document CI process change

---
 CHANGELOG.md    | 4 ++++
 CONTRIBUTING.md | 1 +
 2 files changed, 5 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c44628a6dd..cd1205f6bf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -127,6 +127,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - `wxpython`
 - `yamlordereddictloader`
 
+#### Removed CI dependency
+
+- `tj-actions/changed-files` 
([CVE-2023-51664](https://www.stepsecurity.io/blog/harden-runner-detection-tj-actions-changed-files-action-is-compromised))
+
 ## [1.8.7] - 2024-05-03
 
 ### Added
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 24b37bcd47..2f54c2a947 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -80,3 +80,4 @@ We have 3 types of staging Dockerfiles: operating system, software dependency, a
 * To change a dependency in a C-PAC image, update the stage images at the top of the relevant `.github/Dockerfiles/C-PAC.develop-*.Dockerfile`.
 * If a Dockerfile does not yet exist for the added dependency, create a Dockerfile for the new dependency and add the filename (without extension) to [`jobs.stages.strategy.matrix.Dockerfile` in `.github/workflows/build_stages.yml`](https://github.com/FCP-INDI/C-PAC/blob/4e18916384e52c3dc9610aea3eed537c19d480e3/.github/workflows/build_stages.yml#L77-L97)
 * If no Dockerfiles use the removed dependency, remove the Dockerfile for the dependency and remove the filename from [`jobs.stages.strategy.matrix.Dockerfile` in `.github/workflows/build_stages.yml`](https://github.com/FCP-INDI/C-PAC/blob/4e18916384e52c3dc9610aea3eed537c19d480e3/.github/workflows/build_stages.yml#L77-L97)
+* When making changes to a Dockerfile, include the line `[rebuild {filename}]` where `filename` is the name of the Dockerfile without the extension (e.g., `[rebuild Ubuntu.jammy-non-free]`).

From 30dfc6fcffa3d08ecf5447d39b606e5b7b7a2142 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Sat, 15 Mar 2025 15:46:24 -0400
Subject: [PATCH 217/507] :truck: Move global fixtures to own file

---
 CPAC/_global_fixtures.py      | 34 ++++++++++++++++++++++++++++++++++
 CPAC/conftest.py              | 17 ++---------------
 dev/circleci_data/conftest.py |  2 +-
 3 files changed, 37 insertions(+), 16 deletions(-)
 create mode 100644 CPAC/_global_fixtures.py

diff --git a/CPAC/_global_fixtures.py b/CPAC/_global_fixtures.py
new file mode 100644
index 0000000000..7b765736ee
--- /dev/null
+++ b/CPAC/_global_fixtures.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Global fixtures for C-PAC tests."""
+
+from pathlib import Path
+
+from _pytest.tmpdir import TempPathFactory
+from git import Repo
+import pytest
+
+
+@pytest.fixture(scope="session")
+def bids_examples(tmp_path_factory: TempPathFactory) -> Path:
+    """Get the BIDS examples dataset."""
+    example_dir = tmp_path_factory.mktemp("bids-examples")
+    if not example_dir.exists() or not any(example_dir.iterdir()):
+        Repo.clone_from(
+            "https://github.com/bids-standard/bids-examples.git", str(example_dir)
+        )
+    return example_dir
diff --git a/CPAC/conftest.py b/CPAC/conftest.py
index 7b765736ee..330489ce0d 100644
--- a/CPAC/conftest.py
+++ b/CPAC/conftest.py
@@ -16,19 +16,6 @@
 # License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Global fixtures for C-PAC tests.""" -from pathlib import Path +from CPAC._global_fixtures import bids_examples -from _pytest.tmpdir import TempPathFactory -from git import Repo -import pytest - - -@pytest.fixture(scope="session") -def bids_examples(tmp_path_factory: TempPathFactory) -> Path: - """Get the BIDS examples dataset.""" - example_dir = tmp_path_factory.mktemp("bids-examples") - if not example_dir.exists() or not any(example_dir.iterdir()): - Repo.clone_from( - "https://github.com/bids-standard/bids-examples.git", str(example_dir) - ) - return example_dir +__all__ = ["bids_examples"] diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py index ba239b2b4f..4966b986c5 100644 --- a/dev/circleci_data/conftest.py +++ b/dev/circleci_data/conftest.py @@ -16,4 +16,4 @@ # License along with C-PAC. If not, see . """Global fixtures for C-PAC tests.""" -from CPAC.conftest import * # noqa: F403 +from CPAC._global_fixtures import * # noqa: F403 From 766bfd48485989dea5fdecad06abe48bc77e6c05 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sat, 15 Mar 2025 16:21:41 -0400 Subject: [PATCH 218/507] :white_check_mark: Adjust paths for updated test --- .../data_settings_bids_examples_ds051_default_BIDS.yml | 4 ++-- dev/circleci_data/test_external_utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml index 5449692350..c196250ac8 100644 --- a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml +++ b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml @@ -15,7 +15,7 @@ dataFormat: BIDS # BIDS Data Format only. # # This should be the path to the overarching directory containing the entire dataset. -bidsBaseDir: ./bids-examples/ds051 +bidsBaseDir: ./ds051 # File Path Template for Anatomical Files @@ -49,7 +49,7 @@ awsCredentialsFile: None # Directory where CPAC should place data configuration files. -outputSubjectListLocation: ./dev/circleci_data +outputSubjectListLocation: /code/dev/circleci_data # A label to be appended to the generated participant list files. 
diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py index 31f6b243da..c55e264c8b 100644 --- a/dev/circleci_data/test_external_utils.py +++ b/dev/circleci_data/test_external_utils.py @@ -94,8 +94,8 @@ def test_build_data_config(caplog, cli_runner, multiword_connector): def test_new_settings_template(bids_examples: Path, caplog, cli_runner): """Test CLI ``utils new-settings-template``.""" caplog.set_level(INFO) - os.chdir(CPAC_DIR) assert bids_examples.exists() + os.chdir(bids_examples) result = cli_runner.invoke( CPAC_main_utils.commands[ From b9d8c1526b98d9c96100fee4aeaca048d8ec588a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 3 Feb 2025 13:04:16 -0500 Subject: [PATCH 219/507] =?UTF-8?q?=E2=9C=A8Introduced=20desc-head=5Fbold?= =?UTF-8?q?=20and=20changed=20sbref=20generating=20nodeblock?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 3 +++ CPAC/func_preproc/func_preproc.py | 9 +++++-- CPAC/pipeline/schema.py | 5 +++- CPAC/registration/registration.py | 25 ++++++++++++++++--- .../configs/pipeline_config_blank.yml | 2 ++ 5 files changed, 37 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd1205f6bf..07192d9024 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- New switch `mask_sbref` under `Selected Functional Volume` in functional registration. +- New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. ### Changed @@ -35,6 +37,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Moved `FSL-AFNI subworkflow` from inside a `bold_mask_fsl_afni` nodeblock into a separate function. - Renamed `desc-ref_bold` created in this workflow to `desc-unifized_bold`. - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it. +- Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`. 
### Fixed diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index ac3335d8de..602a0d73c1 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1740,6 +1740,10 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): "Description": "The skull-stripped BOLD time-series.", "SkullStripped": True, }, + "desc-head_bold": { + "Description": "The non skull-stripped BOLD time-series.", + "SkullStripped": False, + }, }, ) def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): @@ -1751,8 +1755,8 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): func_edge_detect.inputs.expr = "a*b" func_edge_detect.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_edge_detect, "in_file_a") + node_head_bold, out_head_bold = strat_pool.get_data("desc-preproc_bold") + wf.connect(node_head_bold, out_head_bold, func_edge_detect, "in_file_a") node, out = strat_pool.get_data("space-bold_desc-brain_mask") wf.connect(node, out, func_edge_detect, "in_file_b") @@ -1760,6 +1764,7 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { "desc-preproc_bold": (func_edge_detect, "out_file"), "desc-brain_bold": (func_edge_detect, "out_file"), + "desc-head_bold": (node_head_bold, out_head_bold), } return (wf, outputs) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index cdb72747d0..a423aab903 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -741,7 +741,10 @@ def sanitize(filename): ) ], "Mean Functional": {"n4_correct_func": bool1_1}, - "Selected Functional Volume": {"func_reg_input_volume": int}, + "Selected Functional Volume": { + "func_reg_input_volume": int, + "mask_sbref": bool1_1, + }, }, "boundary_based_registration": { "run": forkable, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8c4a20e605..d5fc68aa2c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3134,7 +3134,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "input", ], option_val="Selected_Functional_Volume", - inputs=[("desc-brain_bold", ["desc-motion_bold", "bold"], "sbref")], + inputs=[("desc-preproc_bold", ["desc-head_bold", "bold"], "sbref")], outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): @@ -3152,15 +3152,32 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): if not cfg.registration_workflows["functional_registration"]["coregistration"][ "func_input_prep" ]["reg_with_skull"]: - node, out = strat_pool.get_data("desc-brain_bold") + node, out = strat_pool.get_data("desc-preproc_bold") else: # TODO check which file is functional_skull_leaf # TODO add a function to choose brain or skull? 
- node, out = strat_pool.get_data(["desc-motion_bold", "bold"]) + node, out = strat_pool.get_data(["desc-head_bold", "bold"]) wf.connect(node, out, get_func_volume, "in_file_a") - coreg_input = (get_func_volume, "out_file") + if cfg.registration_workflows["functional_registration"]["coregistration"][ + "func_input_prep" + ]["Selected Functional Volume"]["mask_sbref"] and strat_pool.check_rpool( + "space-bold_desc-brain_mask" + ): + mask_sbref = pe.Node(interface=afni.Calc(), name=f"mask_sbref_{pipe_num}") + + mask_sbref.inputs.expr = "a*b" + mask_sbref.inputs.outputtype = "NIFTI_GZ" + + wf.connect(get_func_volume, "out_file", mask_sbref, "in_file_a") + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, mask_sbref, "in_file_b") + + coreg_input = (mask_sbref, "out_file") + + else: + coreg_input = (get_func_volume, "out_file") outputs = {"sbref": coreg_input} diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 5b7f3f5188..3c568611f5 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -724,6 +724,8 @@ registration_workflows: #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. func_reg_input_volume: 0 + mask_sbref: true + boundary_based_registration: # this is a fork point From cc38711bc528283300d5188909df6da9ad6112cd Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 10 Mar 2025 16:31:19 -0400 Subject: [PATCH 220/507] precommit fixes --- CHANGELOG.md | 2 +- CPAC/pipeline/schema.py | 2 +- CPAC/registration/registration.py | 53 ++++++++++++------- .../configs/pipeline_config_blank.yml | 3 +- 4 files changed, 39 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 07192d9024..5aff2f30d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,7 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. -- New switch `mask_sbref` under `Selected Functional Volume` in functional registration. +- New switch `mask_sbref` under `func_input_prep` in functional registration. - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. 
### Changed diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index a423aab903..eb09c94c98 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -743,8 +743,8 @@ def sanitize(filename): "Mean Functional": {"n4_correct_func": bool1_1}, "Selected Functional Volume": { "func_reg_input_volume": int, - "mask_sbref": bool1_1, }, + "mask_sbref": bool1_1, }, "boundary_based_registration": { "run": forkable, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index d5fc68aa2c..a0e43c150f 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3123,6 +3123,40 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None return (wf, outputs) +@nodeblock( + name="mask_sbref", + switch=[ + ["registration_workflows", "functional_registration", "coregistration", "run"], + [ + "registration_workflows", + "functional_registration", + "coregistration", + "func_input_prep", + "mask_sbref", + "run", + ], + ], + inputs=["sbref", "space-bold_desc-brain_mask"], + outputs=["sbref"], +) +def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None): + """Mask sbref with brain mask.""" + mask_sbref = pe.Node(interface=afni.Calc(), name=f"mask_sbref_{pipe_num}") + + mask_sbref.inputs.expr = "a*b" + mask_sbref.inputs.outputtype = "NIFTI_GZ" + + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, mask_sbref, "in_file_a") + + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, mask_sbref, "in_file_b") + + outputs = {"sbref": (mask_sbref, "out_file")} + + return (wf, outputs) + + @nodeblock( name="coregistration_prep_vol", switch=["functional_preproc", "run"], @@ -3160,24 +3194,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, get_func_volume, "in_file_a") - if cfg.registration_workflows["functional_registration"]["coregistration"][ - "func_input_prep" - ]["Selected Functional Volume"]["mask_sbref"] and strat_pool.check_rpool( - "space-bold_desc-brain_mask" - ): - mask_sbref = pe.Node(interface=afni.Calc(), name=f"mask_sbref_{pipe_num}") - - mask_sbref.inputs.expr = "a*b" - mask_sbref.inputs.outputtype = "NIFTI_GZ" - - wf.connect(get_func_volume, "out_file", mask_sbref, "in_file_a") - node, out = strat_pool.get_data("space-bold_desc-brain_mask") - wf.connect(node, out, mask_sbref, "in_file_b") - - coreg_input = (mask_sbref, "out_file") - - else: - coreg_input = (get_func_volume, "out_file") + coreg_input = (get_func_volume, "out_file") outputs = {"sbref": coreg_input} diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 3c568611f5..fee7d46770 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -724,7 +724,8 @@ registration_workflows: #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. 
func_reg_input_volume: 0 - mask_sbref: true + # Independent of the above `reg_with_skull` option + mask_sbref: On boundary_based_registration: From 83fad00563eefa0c6e48f800243d6d7c0a1f4dcc Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 10 Mar 2025 16:50:05 -0400 Subject: [PATCH 221/507] making mask_sbref default off --- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index fee7d46770..212387464b 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -725,7 +725,7 @@ registration_workflows: func_reg_input_volume: 0 # Independent of the above `reg_with_skull` option - mask_sbref: On + mask_sbref: Off boundary_based_registration: From 72a91d0e58d728598a795c9ad53cd5f3f3d67786 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 10 Mar 2025 17:07:56 -0400 Subject: [PATCH 222/507] precommit changes --- CPAC/pipeline/cpac_pipeline.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 26f67c970f..c237b59ffd 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -148,6 +148,7 @@ coregistration_prep_vol, create_func_to_T1template_symmetric_xfm, create_func_to_T1template_xfm, + mask_sbref, overwrite_transform_anat_to_template, register_ANTs_anat_to_template, register_ANTs_EPI_to_template, @@ -1287,6 +1288,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): coregistration_prep_vol, coregistration_prep_mean, coregistration_prep_fmriprep, + mask_sbref, ], ] From fd65fb505b308dbf825d77fd4d21a3b45a328116 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 10 Mar 2025 18:20:20 -0400 Subject: [PATCH 223/507] correcting the switch for mask_sbref --- CPAC/registration/registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a0e43c150f..b8f69b31f6 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3133,7 +3133,6 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "coregistration", "func_input_prep", "mask_sbref", - "run", ], ], inputs=["sbref", "space-bold_desc-brain_mask"], From e497d62b930f48dbbd95df9edda6dfc237b426ea Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 14 Mar 2025 17:42:58 -0400 Subject: [PATCH 224/507] adding changes suggested --- CHANGELOG.md | 6 ++++-- CPAC/pipeline/cpac_pipeline.py | 2 +- CPAC/pipeline/schema.py | 2 -- CPAC/registration/registration.py | 11 ++--------- .../configs/pipeline_config_abcd-options.yml | 3 --- CPAC/resources/configs/pipeline_config_blank.yml | 10 +++------- CPAC/resources/configs/pipeline_config_default.yml | 9 ++++----- 7 files changed, 14 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5aff2f30d9..fe16ffe037 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,7 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. 
 - Optional positional argument "cfg" to `create_lesion_preproc`.
-- New switch `mask_sbref` under `func_input_prep` in functional registration.
+- New switch `mask_sbref` under `func_input_prep` in functional registration and set to default `on`.
 - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`.
@@ -57,7 +57,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - as output from FNIRT registration.
 - as inputs from Nodeblocks requesting it and, replaced with `space-template_desc-brain_mask`.
 - from outputs tsv.
-
+- Inputs `[desc-motion_bold, bold]` from `coregistration_prep_vol` nodeblock.
+- `input` field from `coregistration` in blank and default config.
+- `reg_with_skull` switch from `func_input_prep` in blank and default config.
 
 ## [1.8.7] - 2024-05-03
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index c237b59ffd..1b64b286a8 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -1288,8 +1288,8 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
             coregistration_prep_vol,
             coregistration_prep_mean,
             coregistration_prep_fmriprep,
-            mask_sbref,
         ],
+        mask_sbref,
     ]
 
     # Distortion/Susceptibility Correction
diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index eb09c94c98..1706547e2c 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -725,12 +725,10 @@ def sanitize(filename):
                 "reference": In({"brain", "restore-brain"}),
                 "interpolation": In({"trilinear", "sinc", "spline"}),
                 "using": str,
-                "input": str,
                 "cost": str,
                 "dof": int,
                 "arguments": Maybe(str),
                 "func_input_prep": {
-                    "reg_with_skull": bool1_1,
                     "input": [
                         In(
                             {
diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index b8f69b31f6..015598f757 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -3167,7 +3167,7 @@ def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None):
         "input",
     ],
     option_val="Selected_Functional_Volume",
-    inputs=[("desc-preproc_bold", ["desc-head_bold", "bold"], "sbref")],
+    inputs=[("desc-preproc_bold", "sbref")],
     outputs=["sbref"],
 )
 def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -3182,14 +3182,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None):
         outputtype="NIFTI_GZ",
     )
 
-    if not cfg.registration_workflows["functional_registration"]["coregistration"][
-        "func_input_prep"
-    ]["reg_with_skull"]:
-        node, out = strat_pool.get_data("desc-preproc_bold")
-    else:
-        # TODO check which file is functional_skull_leaf
-        # TODO add a function to choose brain or skull?
-        node, out = strat_pool.get_data(["desc-head_bold", "bold"])
+    node, out = strat_pool.get_data("desc-preproc_bold")
 
     wf.connect(node, out, get_func_volume, "in_file_a")
 
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 937ab7a636..714a301899 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -199,9 +199,6 @@ registration_workflows:
     run: On
     func_input_prep:
 
-      # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration
-      reg_with_skull: On
-
       # Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose. 
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Selected_Functional_Volume] diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 212387464b..2ad2a53560 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -706,9 +706,6 @@ registration_workflows: run: Off func_input_prep: - # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration - reg_with_skull: Off - # Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose. # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Mean_Functional] @@ -724,8 +721,8 @@ registration_workflows: #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. func_reg_input_volume: 0 - # Independent of the above `reg_with_skull` option - mask_sbref: Off + # Mask the sbref created by coregistration input prep nodeblocks above before registration + mask_sbref: On boundary_based_registration: @@ -755,8 +752,7 @@ registration_workflows: # Choose FSL or ABCD as coregistration method using: FSL - # Choose brain or whole-head as coregistration input - input: brain + #TODO Add input field here to choose between whole head or brain # Choose coregistration interpolation interpolation: trilinear diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 3d067fbbcf..2699ff6b52 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -766,8 +766,7 @@ registration_workflows: # Choose FSL or ABCD as coregistration method using: FSL - # Choose brain or whole-head as coregistration input - input: brain + #TODO Add input field here to choose between whole head or brain # Choose coregistration interpolation interpolation: trilinear @@ -783,9 +782,6 @@ registration_workflows: func_input_prep: - # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration - reg_with_skull: Off - # Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose. # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: ['Mean_Functional'] @@ -802,6 +798,9 @@ registration_workflows: #Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration. 
func_reg_input_volume: 0 + # Mask the sbref created by coregistration input prep nodeblocks above before registration + mask_sbref: On + boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline From 80272bbdf13950b61918e53d594d43e401ded1b7 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 21 Mar 2025 11:41:36 -0400 Subject: [PATCH 225/507] fixing typos --- CPAC/func_preproc/func_preproc.py | 2 +- CPAC/registration/registration.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 218b083f2e..ff626765c4 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1634,7 +1634,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): node, out, anat_brain_mask_to_func_res, - "inputspec.pace-template_desc-brain_mask", + "inputspec.space-template_desc-brain_mask", ) wf.connect( diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 345231284e..982005932a 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4405,7 +4405,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out, anat_brain_mask_to_func_res, - "inputspec.pace-template_desc-brain_mask", + "inputspec.space-template_desc-brain_mask", ) wf.connect( From 2f514ca0e11aa815a20f3680d27c3d04c475fd17 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 24 Mar 2025 14:44:00 -0400 Subject: [PATCH 226/507] with precommit --- CPAC/anat_preproc/utils.py | 32 ++++++++++++++++++++++++++++++++ CPAC/pipeline/engine.py | 29 ++++++++++++++++++++++++++--- CPAC/utils/utils.py | 8 ++++++++ 3 files changed, 66 insertions(+), 3 deletions(-) diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py index 39904bbb66..f848ce64b1 100644 --- a/CPAC/anat_preproc/utils.py +++ b/CPAC/anat_preproc/utils.py @@ -502,6 +502,38 @@ def mri_convert(in_file, reslice_like=None, out_file=None, args=None): return out_file +def mri_convert_reorient(in_file, orientation, out_file=None): + """ + Convert files from mgz to nifti format. 
+ + Parameters + ---------- + in_file : string + A path of mgz input file + orientation : string + Orientation of the output file + out_file : string + A path of nifti output file + args : string + Arguments of mri_convert + + Returns + ------- + out_file : string + A path of nifti output file + """ + import os + + if out_file is None: + out_file = in_file.split(".")[0] + "_reoriented.mgz" + + cmd = "mri_convert %s %s --out_orientation %s" % (in_file, out_file, orientation) + + os.system(cmd) + + return out_file + + def wb_command(in_file): import os diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 8749b1f787..f4b4662879 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -30,6 +30,7 @@ from nipype.interfaces import afni from nipype.interfaces.utility import Rename +from CPAC.anat_preproc.utils import mri_convert_reorient from CPAC.image_utils.spatial_smoothing import spatial_smoothing from CPAC.image_utils.statistical_transforms import ( fisher_z_score_standardize, @@ -66,6 +67,7 @@ from CPAC.utils.utils import ( check_prov_for_regtool, create_id_string, + flip_orientation_code, get_last_prov_entry, read_json, write_output_json, @@ -2043,9 +2045,30 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): creds_path=data_paths["creds_path"], dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - rpool.set_data( - key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress" - ) + # if .mgz reorient to RPI + if outfile.endswith(".mgz"): + reorient_mgz = pe.Node( + Function( + input_names=["in_file", "orientation", "out_file"], + output_names=["out_file"], + function=mri_convert_reorient, + ), + name=f"reorient_mgz_{key}", + ) + # Flip orientation before reorient because mri_convert's orientation is opposite that of AFNI + reorient_mgz.inputs.orientation = flip_orientation_code( + cfg.pipeline_setup["desired_orientation"] + ) + reorient_mgz.inputs.out_file = None + wf.connect(fs_ingress, "outputspec.data", reorient_mgz, "in_file") + + rpool.set_data( + key, reorient_mgz, "out_file", {}, "", f"fs_{key}_ingress" + ) + else: + rpool.set_data( + key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress" + ) else: warnings.warn( str(LookupError(f"\n[!] Path does not exist for {fullpath}.\n")) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index b459262993..69b96be4ca 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2631,3 +2631,11 @@ def _replace_in_value_list(current_value, replacement_tuple): for v in current_value if bool(v) and v not in {"None", "Off", ""} ] + + +def flip_orientation_code(code): + """ + Reverts an orientation code by flipping R↔L, A↔P, and I↔S. + """ + flip_dict = {"R": "L", "L": "R", "A": "P", "P": "A", "I": "S", "S": "I"} + return "".join(flip_dict[c] for c in code) From 7371e155da6c8a4c264b28dc819c73ffaf81053b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 24 Mar 2025 15:06:19 -0400 Subject: [PATCH 227/507] correcting the function doc-string --- CPAC/anat_preproc/utils.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py index f848ce64b1..a494ebceda 100644 --- a/CPAC/anat_preproc/utils.py +++ b/CPAC/anat_preproc/utils.py @@ -504,23 +504,23 @@ def mri_convert(in_file, reslice_like=None, out_file=None, args=None): def mri_convert_reorient(in_file, orientation, out_file=None): """ - Convert files from mgz to nifti format. 
+    Reorient the mgz files using mri_convert.
 
     Parameters
     ----------
     in_file : string
-        A path of mgz input file
+        A path of mgz input file.
     orientation : string
-        Orientation of the output file
+        Orientation of the output file.
     out_file : string
-        A path of nifti output file
+        A path of mgz output file.
     args : string
-        Arguments of mri_convert
+        Arguments of mri_convert.
 
     Returns
     -------
     out_file : string
-        A path of nifti output file
+        A path of reoriented mgz output file.
     """
     import os

From 325deb5170ed239cd3ca54af78e96da73416bda0 Mon Sep 17 00:00:00 2001
From: birajstha <111654544+birajstha@users.noreply.github.com>
Date: Mon, 24 Mar 2025 16:28:33 -0400
Subject: [PATCH 228/507] Update CPAC/pipeline/engine.py

Co-authored-by: Jon Cluce
---
 CPAC/pipeline/engine.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index f4b4662879..11f5965e8e 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -2045,7 +2045,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id):
                     creds_path=data_paths["creds_path"],
                     dl_dir=cfg.pipeline_setup["working_directory"]["path"],
                 )
-                # if .mgz reorient to RPI
+                # reorient *.mgz
                 if outfile.endswith(".mgz"):
                     reorient_mgz = pe.Node(

From 252eed4ab3fd3353868f9061f85b7065dd5ab0c7 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 26 Mar 2025 10:33:31 -0400
Subject: [PATCH 229/507] :pencil2: Put `tj-actions/changed-files` in correct place in CHANGELOG

---
 CHANGELOG.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e9624712c4..8bf75894d2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -67,6 +67,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - `input` field from `coregistration` in blank and default config.
 - `reg_with_skull` switch from `func_input_prep` in blank and default config. 
+#### Removed CI dependency + +- `tj-actions/changed-files` ([CVE-2023-51664](https://www.stepsecurity.io/blog/harden-runner-detection-tj-actions-changed-files-action-is-compromised)) + ## [1.8.7] - 2024-05-03 ### Added @@ -138,10 +142,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `wxpython` - `yamlordereddictloader` -#### Removed CI dependency - -- `tj-actions/changed-files` ([CVE-2023-51664](https://www.stepsecurity.io/blog/harden-runner-detection-tj-actions-changed-files-action-is-compromised)) - ### Upgraded dependencies - `AFNI` 21.1.00 'Domitian' → 23.3.09 'Septimius Severus' From 24e7f4b57a316b9465a545bf2c4989eee6c2de89 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Mar 2025 11:29:22 -0400 Subject: [PATCH 230/507] =?UTF-8?q?=F0=9F=94=A7=20Turn=20on=20xcp-qc=20fil?= =?UTF-8?q?e=20generation=20in=20all=20pre-configs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Florian Rupprecht Co-authored-by: Greg Kiar --- CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ++++ CPAC/resources/configs/pipeline_config_abcd-prep.yml | 4 ++++ CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml | 3 +++ CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml | 3 +++ CPAC/resources/configs/pipeline_config_ccs-options.yml | 3 +++ CPAC/resources/configs/pipeline_config_default-deprecated.yml | 3 +++ CPAC/resources/configs/pipeline_config_default.yml | 4 ++-- CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml | 3 +++ CPAC/resources/configs/pipeline_config_fmriprep-options.yml | 4 ++++ CPAC/resources/configs/pipeline_config_monkey-ABCD.yml | 3 +++ CPAC/resources/configs/pipeline_config_monkey.yml | 3 +++ CPAC/resources/configs/pipeline_config_ndmg.yml | 3 +++ CPAC/resources/configs/pipeline_config_regtest-1.yml | 3 +++ CPAC/resources/configs/pipeline_config_regtest-2.yml | 3 +++ CPAC/resources/configs/pipeline_config_regtest-3.yml | 3 +++ CPAC/resources/configs/pipeline_config_regtest-4.yml | 3 +++ CPAC/resources/configs/pipeline_config_rodent.yml | 3 +++ 17 files changed, 53 insertions(+), 2 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index badb8636a7..02859027ff 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -13,6 +13,10 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_abcd-options + output_directory: + quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On system_config: # The maximum amount of memory each participant's workflow can allocate. diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index c0bfda8dda..22ec01b021 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -13,6 +13,10 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_abcd-prep + output_directory: + quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On system_config: # The maximum amount of memory each participant's workflow can allocate. 
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index af356132a9..0bb3fd15ac 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index 63e8fc0c92..04bc116581 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index f73cedec84..891a800837 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index cc768ce714..f774f8e479 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + anatomical_preproc: run: On acpc_alignment: diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 0bed19be8b..c414021ff8 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -54,10 +54,10 @@ pipeline_setup: # Quality control outputs quality_control: # Generate quality control pages containing preprocessing and derivative outputs. 
- generate_quality_control_images: True + generate_quality_control_images: On # Generate eXtensible Connectivity Pipeline-style quality control files - generate_xcpqc_files: False + generate_xcpqc_files: On working_directory: diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index da6b97142f..1f6cf5e1ef 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + outdir_ingress: run: On Template: MNI152NLin2009cAsym diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index f97bcf3180..842f371257 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -13,6 +13,10 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_fmriprep-options + output_directory: + quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On system_config: # Select Off if you intend to run CPAC on a single machine. diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index e1fb1e8e66..9289e85966 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + system_config: # Select Off if you intend to run CPAC on a single machine. diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 17b1396759..4caef0c006 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + system_config: # Select Off if you intend to run CPAC on a single machine. diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index af183e82c1..02cd19b673 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + system_config: # The number of cores to allocate to ANTS-based anatomical registration per participant. 
diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index 22b0506092..7e61db6b8c 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index 574f9a6f4c..0ba3b198aa 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index 876e14cc58..d9a2cd679e 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 534a5cf6b7..b33af48a33 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index a066241431..95bc06b9b8 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -21,6 +21,9 @@ pipeline_setup: # Generate quality control pages containing preprocessing and derivative outputs. generate_quality_control_images: On + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + system_config: # The maximum amount of memory each participant's workflow can allocate. 
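With every preconfig above updated, a quick consistency check is possible — a rough sketch, assuming the preconfigs live under `CPAC/resources/configs/` and that plain `yaml.safe_load` is sufficient (PyYAML already reads the `On`/`Off` style used here as booleans):

    from pathlib import Path

    import yaml

    for config in sorted(Path("CPAC/resources/configs").glob("pipeline_config_*.yml")):
        settings = yaml.safe_load(config.read_text())
        quality_control = (
            settings.get("pipeline_setup", {})
            .get("output_directory", {})
            .get("quality_control", {})
        )
        print(config.name, quality_control.get("generate_xcpqc_files", "unset"))

After this sweep, every preconfiguration except `blank` should report `True`.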
From 23ca231915d4eb22987d23ec3635688f54027b42 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Mar 2025 11:44:35 -0400 Subject: [PATCH 231/507] :memo: Add "Turned `generate_xcpqc_files` on for all preconfigurations except `blank`" to CHANGELOG [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8bf75894d2..83e3cddbf8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Renamed `desc-ref_bold` created in this workflow to `desc-unifized_bold`. - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it. - Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`. +- Turned `generate_xcpqc_files` on for all preconfigurations except `blank`. ### Fixed From e447211689a04ff7f291da5b1c05d97e028373e9 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Mar 2025 13:31:54 -0400 Subject: [PATCH 232/507] :pencil2: Update `f"space-{sym}template_desc-{orig}_mask"` re: #2182 --- CPAC/registration/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 982005932a..a1cf562981 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1601,7 +1601,7 @@ def FSL_registration_connector( fnirt_reg_anat_mni, "outputspec.output_head", ), - f"space-{sym}template_desc-{orig}_mask": ( + f"space-{sym}template_desc-{'brain' if orig == 'T1w' else orig}_mask": ( fnirt_reg_anat_mni, "outputspec.output_mask", ), From beddfae7c1075823a39905530eb0e84b652909e9 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 27 Mar 2025 14:44:36 -0400 Subject: [PATCH 233/507] :bug: Update nodeblock outputs for "register_symmetric_FSL_anat_to_template" re: 20572de --- CPAC/registration/registration.py | 40 ++++++++++++++++--------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a1cf562981..258cb9712d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2318,26 +2318,28 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "dilated-symmetric-brain-mask", ], outputs={ - "space-symtemplate_desc-preproc_T1w": { - "Template": "T1w-brain-template-symmetric" - }, - "from-T1w_to-symtemplate_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-T1w_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-T1w_to-symtemplate_mode-image_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" + **{ + f"space-symtemplate_desc-{suffix}": { + "Template": "T1w-brain-template-symmetric" + } + for suffix in [ + *[f"{desc}_T1w" for desc in ["brain", "preproc"]], + "brain_mask", + ] }, - "from-longitudinal_to-symtemplate_mode-image_xfm": { - "Template": "T1w-template-symmetric" + **{ + output: {"Template": "T1w-template-symmetric"} + for output in [ + "space-symtemplate_desc-head_T1w", + "from-T1w_to-symtemplate_mode-image_desc-linear_xfm", + 
"from-symtemplate_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-symtemplate_mode-image_warp", + "from-T1w_to-symtemplate_mode-image_xfm", + "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm", + "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm", + "from-longitudinal_to-symtemplate_mode-image_xfm", + "space-symtemplate_desc-T1wT2w_biasfield", + ] }, }, ) From e8ca276b534847d04d3eee7d16c0347a60974320 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 27 Mar 2025 15:21:19 -0400 Subject: [PATCH 234/507] removing TODO from config --- CPAC/resources/configs/pipeline_config_blank.yml | 2 -- CPAC/resources/configs/pipeline_config_default.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index b69cee4854..7371aa81de 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -747,8 +747,6 @@ registration_workflows: # Choose FSL or ABCD as coregistration method using: FSL - #TODO Add input field here to choose between whole head or brain - # Choose coregistration interpolation interpolation: trilinear diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 0bed19be8b..a30cabe1b7 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -765,8 +765,6 @@ registration_workflows: # Choose FSL or ABCD as coregistration method using: FSL - #TODO Add input field here to choose between whole head or brain - # Choose coregistration interpolation interpolation: trilinear From c1cfdaa0fb934dfb86a7b9fa20befb6a30e2f691 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 4 Apr 2025 13:26:47 -0400 Subject: [PATCH 235/507] adding specific switch for correct_restore_brain_intensity_abcd nodeblock --- CHANGELOG.md | 1 + CPAC/anat_preproc/anat_preproc.py | 5 ++--- CPAC/pipeline/schema.py | 3 +++ CPAC/resources/configs/pipeline_config_abcd-options.yml | 3 +++ CPAC/resources/configs/pipeline_config_blank.yml | 3 +++ CPAC/resources/configs/pipeline_config_default.yml | 3 +++ 6 files changed, 15 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 83e3cddbf8..7145fdffb2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it. - Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`. - Turned `generate_xcpqc_files` on for all preconfigurations except `blank`. +- Introduced specific switch for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config. 
### Fixed diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 8fed143fae..8850c7c4ec 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -3161,9 +3161,8 @@ def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction" @nodeblock( name="correct_restore_brain_intensity_abcd", - config=["anatomical_preproc", "brain_extraction"], - option_key="using", - option_val="FreeSurfer-ABCD", + config=["anatomical_preproc", "brain_extraction", "FreeSurfer-ABCD"], + switch=["correct_restore_brain_intensity_abcd"], inputs=[ ( "desc-preproc_T1w", diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index e25df0b9b4..4894fc990d 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -638,6 +638,9 @@ def sanitize(filename): "regmask_path": Maybe(str), }, "FreeSurfer-BET": {"T1w_brain_template_mask_ccs": Maybe(str)}, + "FreeSurfer-ABCD": { + "correct_restore_brain_intensity_abcd": bool1_1, + }, }, }, "segmentation": { diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 02859027ff..00b8c90e2b 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -84,6 +84,9 @@ anatomical_preproc: # this is a fork option using: [FreeSurfer-ABCD] + FreeSurfer-ABCD: + correct_restore_brain_intensity_abcd: On + # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 7371aa81de..ed6a3cd887 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -419,6 +419,9 @@ anatomical_preproc: # niworkflows-ants registration mask (can be optional) regmask_path: /ants_template/oasis/T_template0_BrainCerebellumRegistrationMask.nii.gz + FreeSurfer-ABCD: + correct_restore_brain_intensity_abcd: Off + run_t2: Off # Bias field correction based on square root of T1w * T2w diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index fa4b41179c..87ff1a663d 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -484,6 +484,9 @@ anatomical_preproc: # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz + FreeSurfer-ABCD: + correct_restore_brain_intensity_abcd: Off + segmentation: From 98dd32f935373318def6049e2793347bec5d6ca5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 4 Apr 2025 15:22:30 -0400 Subject: [PATCH 236/507] adding specific switch for restore_t1w_intensity --- CHANGELOG.md | 2 +- CPAC/anat_preproc/anat_preproc.py | 4 ++-- CPAC/pipeline/schema.py | 6 +++--- CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ++-- CPAC/resources/configs/pipeline_config_blank.yml | 4 ++-- CPAC/resources/configs/pipeline_config_default.yml | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7145fdffb2..baa8a8ecd0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,7 +44,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, 
if not it runs the `FSL-AFNI subworkflow` to create it. - Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`. - Turned `generate_xcpqc_files` on for all preconfigurations except `blank`. -- Introduced specific switch for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config. +- Introduced specific switch `restore_t1w_intensity` for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config. ### Fixed diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 8850c7c4ec..751fb499d3 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -3161,8 +3161,8 @@ def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction" @nodeblock( name="correct_restore_brain_intensity_abcd", - config=["anatomical_preproc", "brain_extraction", "FreeSurfer-ABCD"], - switch=["correct_restore_brain_intensity_abcd"], + config=["anatomical_preproc", "restore_t1w_intensity"], + switch=["run"], inputs=[ ( "desc-preproc_T1w", diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 4894fc990d..828c0b1aec 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -638,9 +638,9 @@ def sanitize(filename): "regmask_path": Maybe(str), }, "FreeSurfer-BET": {"T1w_brain_template_mask_ccs": Maybe(str)}, - "FreeSurfer-ABCD": { - "correct_restore_brain_intensity_abcd": bool1_1, - }, + }, + "restore_t1w_intensity": { + "run": bool1_1, }, }, "segmentation": { diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 00b8c90e2b..cd5c14ad42 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -84,8 +84,8 @@ anatomical_preproc: # this is a fork option using: [FreeSurfer-ABCD] - FreeSurfer-ABCD: - correct_restore_brain_intensity_abcd: On + restore_t1w_intensity: + run: On # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index ed6a3cd887..66f34fe41c 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -419,8 +419,8 @@ anatomical_preproc: # niworkflows-ants registration mask (can be optional) regmask_path: /ants_template/oasis/T_template0_BrainCerebellumRegistrationMask.nii.gz - FreeSurfer-ABCD: - correct_restore_brain_intensity_abcd: Off + restore_t1w_intensity: + run: Off run_t2: Off diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 87ff1a663d..5c22d2ee86 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -484,8 +484,8 @@ anatomical_preproc: # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz - FreeSurfer-ABCD: - correct_restore_brain_intensity_abcd: Off + restore_t1w_intensity: + run: Off segmentation: From 09853bdb75ca90997a361f778a5e8355ad51a691 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 10 Apr 2025 16:49:36 -0400 Subject: [PATCH 237/507] :recycle: DRY up output key collection for group config --- CPAC/pipeline/cpac_group_runner.py | 60 
++++----------------------- CPAC/utils/create_fsl_flame_preset.py | 16 +------ CPAC/utils/outputs.py | 19 ++++++++- 3 files changed, 25 insertions(+), 70 deletions(-) diff --git a/CPAC/pipeline/cpac_group_runner.py b/CPAC/pipeline/cpac_group_runner.py index 57d5cc80dc..acc594adaf 100644 --- a/CPAC/pipeline/cpac_group_runner.py +++ b/CPAC/pipeline/cpac_group_runner.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2024 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -143,31 +143,12 @@ def gather_nifti_globs(pipeline_output_folder, resource_list, pull_func=False): import glob import os - import pandas as pd - import pkg_resources as p + from CPAC.utils.outputs import group_derivatives exts = ".nii" nifti_globs = [] - keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv") - try: - keys = pd.read_csv(keys_tsv, delimiter="\t") - except Exception as e: - err = ( - "\n[!] Could not access or read the cpac_outputs.tsv " - f"resource file:\n{keys_tsv}\n\nError details {e}\n" - ) - raise Exception(err) - - derivative_list = list(keys[keys["Sub-Directory"] == "func"]["Resource"]) - derivative_list = derivative_list + list( - keys[keys["Sub-Directory"] == "anat"]["Resource"] - ) - - if pull_func: - derivative_list = derivative_list + list( - keys[keys["Space"] == "functional"]["Resource"] - ) + derivative_list = group_derivatives(pull_func) if len(resource_list) == 0: err = "\n\n[!] No derivatives selected!\n\n" @@ -361,33 +342,14 @@ def create_output_dict_list( """Create a dictionary of output filepaths and their associated information.""" import os - import pandas as pd - import pkg_resources as p - if len(resource_list) == 0: err = "\n\n[!] No derivatives selected!\n\n" raise Exception(err) if derivatives is None: - keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv") - try: - keys = pd.read_csv(keys_tsv, delimiter="\t") - except Exception as e: - err = ( - "\n[!] Could not access or read the cpac_outputs.csv " - f"resource file:\n{keys_tsv}\n\nError details {e}\n" - ) - raise Exception(err) + from CPAC.utils.outputs import group_derivatives - derivatives = list(keys[keys["Sub-Directory"] == "func"]["Resource"]) - derivatives = derivatives + list( - keys[keys["Sub-Directory"] == "anat"]["Resource"] - ) - - if pull_func: - derivatives = derivatives + list( - keys[keys["Space"] == "functional"]["Resource"] - ) + derivatives = group_derivatives(pull_func) # remove any extra /'s pipeline_output_folder = pipeline_output_folder.rstrip("/") @@ -752,18 +714,10 @@ def prep_feat_inputs(group_config_file: str) -> dict: import os import pandas as pd - import pkg_resources as p - keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv") - try: - keys = pd.read_csv(keys_tsv, delimiter="\t") - except Exception as e: - err = ( - "\n[!] Could not access or read the cpac_outputs.tsv " - f"resource file:\n{keys_tsv}\n\nError details {e}\n" - ) - raise Exception(err) + from CPAC.utils.outputs import Outputs + keys = Outputs.reference derivatives = list( keys[keys["Derivative"] == "yes"][keys["Space"] == "template"][ keys["Values"] == "z-score" diff --git a/CPAC/utils/create_fsl_flame_preset.py b/CPAC/utils/create_fsl_flame_preset.py index 856c10a3b4..848fe5e9fe 100644 --- a/CPAC/utils/create_fsl_flame_preset.py +++ b/CPAC/utils/create_fsl_flame_preset.py @@ -1,4 +1,4 @@ -# Copyright (C) 2018-2024 C-PAC Developers +# Copyright (C) 2018-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -1092,20 +1092,6 @@ def run( import os - import pandas as pd - import pkg_resources as p - - # make life easy - keys_csv = p.resource_filename("CPAC", "resources/cpac_outputs.csv") - try: - pd.read_csv(keys_csv) - except Exception as e: - err = ( - "\n[!] Could not access or read the cpac_outputs.csv " - f"resource file:\n{keys_csv}\n\nError details {e}\n" - ) - raise Exception(err) - if derivative_list == "all": derivative_list = [ "alff", diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py index 451d893987..0d7b580400 100644 --- a/CPAC/utils/outputs.py +++ b/CPAC/utils/outputs.py @@ -17,6 +17,7 @@ """Specify the resources that C-PAC writes to the output direcotry.""" from importlib.resources import files +from typing import ClassVar import pandas as pd @@ -47,8 +48,12 @@ class Outputs: reference[reference["4D Time Series"] == "Yes"]["Resource"] ) - anat = list(reference[reference["Sub-Directory"] == "anat"]["Resource"]) - func = list(reference[reference["Sub-Directory"] == "func"]["Resource"]) + anat: ClassVar[list[str]] = list( + reference[reference["Sub-Directory"] == "anat"]["Resource"] + ) + func: ClassVar[list[str]] = list( + reference[reference["Sub-Directory"] == "func"]["Resource"] + ) # outputs to send into smoothing, if smoothing is enabled, and # outputs to write out if the user selects to write non-smoothed outputs @@ -65,6 +70,8 @@ class Outputs: all_template_filter = _template_filter | _epitemplate_filter | _symtemplate_filter all_native_filter = _T1w_native_filter | _bold_native_filter | _long_native_filter + bold_native: ClassVar[list[str]] = list(reference[_bold_native_filter]["Resource"]) + native_nonsmooth = list( reference[all_native_filter & _nonsmoothed_filter]["Resource"] ) @@ -121,3 +128,11 @@ def _is_gifti(_file_key): for gifti in giftis.itertuples() if " " in gifti.File } + + +def group_derivatives(pull_func: bool = False) -> list[str]: + """Gather keys for anatomical and functional derivatives for group analysis.""" + derivatives: list[str] = Outputs.func + Outputs.anat + if pull_func: + derivatives = derivatives + Outputs.bold_native + return derivatives From 157d0a38f7ef207e2abf651b455861e0e911c1bb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 10 Apr 2025 17:07:12 -0400 Subject: [PATCH 238/507] :memo: Add "Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1)." to CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index baa8a8ecd0..f896015b6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -53,6 +53,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed empty `shell` variable in cluster run scripts. - A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. - Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. +- Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). 
### Removed From 0978ac3515bc24024b955c065121dba451187f2a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 11 Apr 2025 22:47:32 -0400 Subject: [PATCH 239/507] :memo: Type-annotate `Outputs.reference` Co-authored-by: Biraj Shrestha <111654544+birajstha@users.noreply.github.com> --- CPAC/utils/outputs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py index 0d7b580400..f148bba87d 100644 --- a/CPAC/utils/outputs.py +++ b/CPAC/utils/outputs.py @@ -28,7 +28,9 @@ class Outputs: reference_csv = str(files("CPAC").joinpath("resources/cpac_outputs.tsv")) try: - reference = pd.read_csv(reference_csv, delimiter="\t", keep_default_na=False) + reference: ClassVar[pd.DataFrame] = pd.read_csv( + reference_csv, delimiter="\t", keep_default_na=False + ) except Exception as e: err = ( "\n[!] Could not access or read the cpac_outputs.tsv " From 611a37ec0ff3491b30013b7c1137e0c16da87877 Mon Sep 17 00:00:00 2001 From: birajstha Date: Mon, 14 Apr 2025 13:12:03 -0500 Subject: [PATCH 240/507] changing ref from template to T1w image --- CPAC/anat_preproc/anat_preproc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 751fb499d3..58eaaf0820 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -3058,7 +3058,7 @@ def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extract inverse_warp = pe.Node(interface=fsl.InvWarp(), name="inverse_warp") inverse_warp.inputs.output_type = "NIFTI_GZ" - preproc.connect(inputnode, "template_skull_for_anat_2mm", inverse_warp, "reference") + preproc.connect(inputnode, "anat_data", inverse_warp, "reference") preproc.connect(non_linear_reg, "field_file", inverse_warp, "warp") From ef7b15a4eb3e43728ae93b47f1ec31b0cd940ab9 Mon Sep 17 00:00:00 2001 From: birajstha Date: Mon, 14 Apr 2025 20:47:36 -0500 Subject: [PATCH 241/507] adding to changelog --- CHANGELOG.md | 5 +++++ CPAC/anat_preproc/anat_preproc.py | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f896015b6f..2d7580118d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ C-PAC is free software: you can redistribute it and/or modify it under the terms C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . --> + # Changelog All notable changes to this project will be documented in this file. @@ -115,6 +116,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed a bug where sparsity thresholds were not being scaled for network centrality. - Fixed a bug where `calculate_motion_first` would not calculate motion at all. - Fixed a bug in parsing `FROM: /file/path` syntax +- Fixed chipped `desc-restoreBrain_T1w` from `freesurfer_abcd_preproc` nodeblock. 
### Changed @@ -304,10 +306,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory + - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template` and/or + - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template` + - Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv` - Moved some ephemeral logging statements into `pypeline.log` diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 58eaaf0820..683bb522f7 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -3053,8 +3053,7 @@ def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extract preproc.connect(non_linear_reg, "field_file", apply_warp, "field_file") # Invert warp and transform dilated brain mask back into native space, and use it to mask input image - # Input and reference spaces are the same, using 2mm reference to save time - # invwarp --ref="$Reference2mm" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz + # invwarp --ref="$T1w" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz inverse_warp = pe.Node(interface=fsl.InvWarp(), name="inverse_warp") inverse_warp.inputs.output_type = "NIFTI_GZ" From 1fa33b7b159f99ea424c4af1277679120ff62e9b Mon Sep 17 00:00:00 2001 From: birajstha Date: Mon, 14 Apr 2025 21:02:53 -0500 Subject: [PATCH 242/507] updating the changelog --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d7580118d..d4219f11e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,7 +54,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed empty `shell` variable in cluster run scripts. - A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. - Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. -- Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). +- Fixed chipped `desc-restoreBrain_T1w` from `freesurfer_abcd_preproc` nodeblock. ### Removed @@ -116,7 +116,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed a bug where sparsity thresholds were not being scaled for network centrality. - Fixed a bug where `calculate_motion_first` would not calculate motion at all. - Fixed a bug in parsing `FROM: /file/path` syntax -- Fixed chipped `desc-restoreBrain_T1w` from `freesurfer_abcd_preproc` nodeblock. 
### Changed From b668e67c9f99553694902f4fe35df06e0219dd32 Mon Sep 17 00:00:00 2001 From: birajstha Date: Mon, 14 Apr 2025 21:07:53 -0500 Subject: [PATCH 243/507] with precommit --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d4219f11e3..3c527f89e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,6 +54,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed empty `shell` variable in cluster run scripts. - A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. - Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. +- Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). - Fixed chipped `desc-restoreBrain_T1w` from `freesurfer_abcd_preproc` nodeblock. ### Removed From a9ec3b6da8e663af9f18fbab4c72d9bb8a5481a4 Mon Sep 17 00:00:00 2001 From: birajstha Date: Mon, 14 Apr 2025 21:12:30 -0500 Subject: [PATCH 244/507] restoring spaces added by prettier extension --- CHANGELOG.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c527f89e2..71e5abedae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,6 @@ C-PAC is free software: you can redistribute it and/or modify it under the terms C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . --> - # Changelog All notable changes to this project will be documented in this file. @@ -306,13 +305,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory - - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template` and/or - - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template` - - Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv` - Moved some ephemeral logging statements into `pypeline.log` From 39e6e1507513004491bd7c4677ca41a495a1a927 Mon Sep 17 00:00:00 2001 From: birajstha Date: Tue, 15 Apr 2025 09:58:27 -0500 Subject: [PATCH 245/507] adding a bit more info into the changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 71e5abedae..939ead7f7d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,7 +54,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. - Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. 
This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. - Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). -- Fixed chipped `desc-restoreBrain_T1w` from `freesurfer_abcd_preproc` nodeblock. +- A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off. ### Removed From 52318ca48d9d2492d8b8b61f292e1b62dff7b714 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 9 Apr 2025 14:28:17 -0400 Subject: [PATCH 246/507] removing master input as data is already aligned to desired orientation --- CPAC/func_preproc/func_preproc.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index ff626765c4..dc72907bf1 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1661,9 +1661,6 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): "in_file", ) - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_mask_template_to_native, "master") - outputs = { "space-template_res-bold_desc-brain_T1w": ( anat_brain_to_func_res, From ed1b2147c7155d71a2ab7f5a38f1e2eb03fc1175 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 11 Apr 2025 12:12:20 -0400 Subject: [PATCH 247/507] removing resampling of mask from template space to native space --- CPAC/func_preproc/func_preproc.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index dc72907bf1..06e2993294 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1597,7 +1597,6 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): option_key=["functional_preproc", "func_masking", "using"], option_val="Anatomical_Resampled", inputs=[ - "desc-preproc_bold", "T1w-template-funcreg", "space-template_desc-preproc_T1w", "space-template_desc-brain_mask", @@ -1605,7 +1604,6 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): outputs=[ "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", - "space-bold_desc-brain_mask", ], ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): @@ -1644,23 +1642,6 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): "inputspec.space-template_desc-preproc_T1w", ) - # Resample func mask in template space back to native space - func_mask_template_to_native = pe.Node( - interface=afni.Resample(), - name=f"resample_func_mask_to_native_{pipe_num}", - mem_gb=0, - mem_x=(0.0115, "in_file", "t"), - ) - func_mask_template_to_native.inputs.resample_mode = "NN" - func_mask_template_to_native.inputs.outputtype = "NIFTI_GZ" - - wf.connect( - anat_brain_mask_to_func_res, - "outputspec.space-template_desc-bold_mask", - func_mask_template_to_native, - "in_file", - ) - outputs = { "space-template_res-bold_desc-brain_T1w": ( anat_brain_to_func_res, @@ -1670,7 +1651,6 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): anat_brain_mask_to_func_res, "outputspec.space-template_desc-bold_mask", ), - "space-bold_desc-brain_mask": (func_mask_template_to_native, "out_file"), } return (wf, outputs) From 
c7136eb81bc96d53d397ed269ea1a74350d5e819 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 16 Apr 2025 11:57:32 -0400 Subject: [PATCH 248/507] changing the pre-configs adding anatomical_resampled to the template-space masking block --- .../configs/pipeline_config_abcd-options.yml | 17 ++++++----------- .../configs/pipeline_config_abcd-prep.yml | 16 ++++++---------- .../resources/configs/pipeline_config_blank.yml | 10 ++++++++-- .../configs/pipeline_config_ccs-options.yml | 3 +-- .../configs/pipeline_config_default.yml | 10 ++++++++-- .../pipeline_config_fmriprep-options.yml | 3 +-- .../configs/pipeline_config_monkey-ABCD.yml | 3 +-- .../configs/pipeline_config_monkey.yml | 3 +-- .../configs/pipeline_config_regtest-3.yml | 3 +-- .../configs/pipeline_config_regtest-4.yml | 3 +-- .../configs/pipeline_config_rodent.yml | 3 +-- 11 files changed, 35 insertions(+), 39 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index cd5c14ad42..5f1f1a855b 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -290,20 +290,15 @@ functional_preproc: using: [PhaseDiff, Blip-FSL-TOPUP] func_masking: - run: On - - # Apply functional mask in native space - apply_func_mask_in_native_space: Off + run: Off - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] - # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 - # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask - # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") - # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. - # this is a fork point + template_space_func_masking: + run: On + # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. 
("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") using: [Anatomical_Resampled] + apply_func_mask_in_template_space: On + generate_func_mean: # Generate mean functional image diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 22ec01b021..aef9718952 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -226,15 +226,11 @@ functional_preproc: using: [] func_masking: + run: Off - # Apply functional mask in native space - apply_func_mask_in_native_space: Off - - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] - # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 - # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask - # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") - # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. - # this is a fork point + template_space_func_masking: + run: On + # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") using: [Anatomical_Resampled] + + apply_func_mask_in_template_space: On \ No newline at end of file diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 66f34fe41c..0d48fbf0ca 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1139,11 +1139,10 @@ functional_preproc: # Apply functional mask in native space apply_func_mask_in_native_space: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. 
init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [AFNI] @@ -1151,7 +1150,14 @@ functional_preproc: # Choose whether or not to dilate the anatomical mask if you choose 'Anatomical_Refined' as the functional masking option. It will dilate one voxel if enabled. anatomical_mask_dilation: Off + + template_space_func_masking: + run: Off + # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") + using: [Anatomical_Resampled] + apply_func_mask_in_template_space: Off + generate_func_mean: # Generate mean functional image diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 891a800837..00aafa4ca1 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -183,11 +183,10 @@ functional_preproc: func_masking: run: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. 
# this is a fork point using: [CCS_Anatomical_Refined] diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 5c22d2ee86..c937526a79 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1256,12 +1256,11 @@ functional_preproc: func_masking: run: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point @@ -1334,6 +1333,13 @@ functional_preproc: # Apply functional mask in native space apply_func_mask_in_native_space: On + template_space_func_masking: + run: Off + # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") + using: [Anatomical_Resampled] + + apply_func_mask_in_template_space: Off + generate_func_mean: # Generate mean functional image diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 842f371257..f67225fa73 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -370,11 +370,10 @@ functional_preproc: brain_mask: /code/CPAC/resources/templates/tpl-MNI152NLin2009cAsym_res-02_desc-brain_mask.nii.gz brain_probseg: /code/CPAC/resources/templates/tpl-MNI152NLin2009cAsym_res-01_label-brain_probseg.nii.gz - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. 
init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [FSL_AFNI] diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index 9289e85966..3be4bf4ded 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -325,11 +325,10 @@ functional_preproc: brain_mask: brain_probseg: - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [Anatomical_Based] diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 4caef0c006..f8c5324e0d 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -252,11 +252,10 @@ functional_preproc: func_masking: run: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. 
init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [Anatomical_Refined] diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index d9a2cd679e..6d5fed9e55 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -197,11 +197,10 @@ functional_preproc: FSL_AFNI: bold_ref: /code/CPAC/resources/templates/tpl-MNI152NLin2009cAsym_res-02_desc-fMRIPrep_boldref.nii.gz - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [FSL_AFNI] diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index b33af48a33..1a1d254d3d 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -214,11 +214,10 @@ functional_preproc: func_masking: run: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. 
binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. # this is a fork point using: [Anatomical_Refined] diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index 95bc06b9b8..f3677f47d7 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -215,11 +215,10 @@ functional_preproc: # Robust brain center estimation. Mutually exclusive with functional,reduce_bias,robust,padding,remove_eyes,surfaces robust: On - # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'Anatomical_Resampled', 'CCS_Anatomical_Refined'] + # using: ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', 'Anatomical_Based', 'CCS_Anatomical_Refined'] # FSL_AFNI: fMRIPrep-style BOLD mask. Ref: https://github.com/nipreps/niworkflows/blob/a221f612/niworkflows/func/util.py#L246-L514 # Anatomical_Refined: 1. binarize anat mask, in case it is not a binary mask. 2. fill holes of anat mask 3. init_bold_mask : input raw func → dilate init func brain mask 4. refined_bold_mask : input motion corrected func → dilate anatomical mask 5. get final func mask # Anatomical_Based: Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") # CCS_Anatomical_Refined: Generate the BOLD mask by basing it off of the anatomical brain. Adapted from the BOLD mask method from the CCS pipeline. 
    # this is a fork point
    using: [FSL]

From cfeaa01794d39010224e22318dd98db2d0c186ce Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Wed, 16 Apr 2025 12:06:51 -0400
Subject: [PATCH 249/507] adding template_space_func_masking to the schema

---
 CPAC/pipeline/schema.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index 828c0b1aec..a4b548148a 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -960,7 +960,6 @@ def sanitize(filename):
                         "FSL_AFNI",
                         "Anatomical_Refined",
                         "Anatomical_Based",
-                        "Anatomical_Resampled",
                         "CCS_Anatomical_Refined",
                     ]
                 )
@@ -1011,6 +1010,11 @@ def sanitize(filename):
             },
             "apply_func_mask_in_native_space": bool1_1,
         },
+        "template_space_func_masking": {
+            "run": bool1_1,
+            "using": [In({"Anatomical_Resampled"})],
+            "apply_func_mask_in_template_space": bool1_1,
+        },
         "generate_func_mean": {
             "run": bool1_1,
         },
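For reference, the pipeline-config block this schema entry validates looks like the sketch below, assembled from the `abcd-options`/`abcd-prep` hunks earlier in this series (the values shown are the `abcd-options` settings, not defaults; `blank` and `default` ship with `run: Off`):

    functional_preproc:
      template_space_func_masking:
        run: On
        # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline.
        using: [Anatomical_Resampled]
        apply_func_mask_in_template_space: On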
(func_edge_detect, "out_file"), + "space-template_desc-head_bold": (node_head_bold, out_head_bold), + } + + return (wf, outputs) + @nodeblock( name="func_mean", diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 1b64b286a8..6a4a1f90ba 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1275,7 +1275,6 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): bold_mask_fsl_afni, bold_mask_anatomical_refined, bold_mask_anatomical_based, - bold_mask_anatomical_resampled, bold_mask_ccs, ], bold_masking, @@ -1402,6 +1401,13 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): if cfg.voxel_mirrored_homotopic_connectivity["run"]: pipeline_blocks += [create_func_to_T1template_symmetric_xfm] + # Template space functional masking + if cfg.functional_preproc["template_space_func_masking"]["run"]: + if not rpool.check_rpool("space-template_desc-bold_mask"): + pipeline_blocks += bold_mask_anatomical_resampled, + if cfg.functional_preproc["template_space_func_masking"]["apply_func_mask_in_template_space"]: + pipeline_blocks += apply_func_mask_to_template, + # Nuisance Correction generate_only = ( True not in cfg["nuisance_corrections", "2-nuisance_regression", "run"] From f457a31cfd9361f84ec3969b10b5e125bc7d99ca Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 16 Apr 2025 14:20:02 -0400 Subject: [PATCH 251/507] :lock: Set permissions on :octocat: workflows --- .github/workflows/build_C-PAC.yml | 2 ++ .github/workflows/build_and_test.yml | 2 ++ .github/workflows/deploy_to_Docker_Hub.yml | 2 ++ .github/workflows/on_push.yml | 2 ++ .github/workflows/regression_test_full.yml | 2 ++ .github/workflows/regression_test_lite.yml | 2 ++ .github/workflows/smoke_test_participant.yml | 2 ++ 7 files changed, 14 insertions(+) diff --git a/.github/workflows/build_C-PAC.yml b/.github/workflows/build_C-PAC.yml index fb103acdd7..2dd3298d42 100644 --- a/.github/workflows/build_C-PAC.yml +++ b/.github/workflows/build_C-PAC.yml @@ -1,5 +1,7 @@ name: Build C-PAC image +permissions: read-all + on: workflow_call: inputs: diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index ddfabc4001..4aef28211f 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -17,7 +17,9 @@ name: Build and test C-PAC permissions: + contents: read packages: write + pull-requests: read on: workflow_call: diff --git a/.github/workflows/deploy_to_Docker_Hub.yml b/.github/workflows/deploy_to_Docker_Hub.yml index a8ee3e6547..1a284bb07c 100644 --- a/.github/workflows/deploy_to_Docker_Hub.yml +++ b/.github/workflows/deploy_to_Docker_Hub.yml @@ -1,5 +1,7 @@ name: Deploy to Docker Hub +permissions: read-all + on: workflow_call: inputs: diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml index 741ac4e86a..f7a6d1db5c 100644 --- a/.github/workflows/on_push.yml +++ b/.github/workflows/on_push.yml @@ -16,6 +16,8 @@ # License along with C-PAC. If not, see . 
From f457a31cfd9361f84ec3969b10b5e125bc7d99ca Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 14:20:02 -0400
Subject: [PATCH 251/507] :lock: Set permissions on :octocat: workflows

---
 .github/workflows/build_C-PAC.yml            | 2 ++
 .github/workflows/build_and_test.yml         | 2 ++
 .github/workflows/deploy_to_Docker_Hub.yml   | 2 ++
 .github/workflows/on_push.yml                | 2 ++
 .github/workflows/regression_test_full.yml   | 2 ++
 .github/workflows/regression_test_lite.yml   | 2 ++
 .github/workflows/smoke_test_participant.yml | 2 ++
 7 files changed, 14 insertions(+)

diff --git a/.github/workflows/build_C-PAC.yml b/.github/workflows/build_C-PAC.yml
index fb103acdd7..2dd3298d42 100644
--- a/.github/workflows/build_C-PAC.yml
+++ b/.github/workflows/build_C-PAC.yml
@@ -1,5 +1,7 @@
 name: Build C-PAC image
 
+permissions: read-all
+
 on:
   workflow_call:
     inputs:
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index ddfabc4001..4aef28211f 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -17,7 +17,9 @@
 name: Build and test C-PAC
 
 permissions:
+  contents: read
   packages: write
+  pull-requests: read
 
 on:
   workflow_call:
diff --git a/.github/workflows/deploy_to_Docker_Hub.yml b/.github/workflows/deploy_to_Docker_Hub.yml
index a8ee3e6547..1a284bb07c 100644
--- a/.github/workflows/deploy_to_Docker_Hub.yml
+++ b/.github/workflows/deploy_to_Docker_Hub.yml
@@ -1,5 +1,7 @@
 name: Deploy to Docker Hub
 
+permissions: read-all
+
 on:
   workflow_call:
     inputs:
diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml
index 741ac4e86a..f7a6d1db5c 100644
--- a/.github/workflows/on_push.yml
+++ b/.github/workflows/on_push.yml
@@ -16,6 +16,8 @@
 # License along with C-PAC. If not, see .
 name: Build and test C-PAC
 
+permissions: read-all
+
 on:
   push:
diff --git a/.github/workflows/regression_test_full.yml b/.github/workflows/regression_test_full.yml
index 2c95bf209b..beb494e0fd 100644
--- a/.github/workflows/regression_test_full.yml
+++ b/.github/workflows/regression_test_full.yml
@@ -1,5 +1,7 @@
 name: Run Regression Full Test
 
+permissions: read-all
+
 on:
   workflow_call:
diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml
index a68baa0cda..cad2157787 100644
--- a/.github/workflows/regression_test_lite.yml
+++ b/.github/workflows/regression_test_lite.yml
@@ -1,5 +1,7 @@
 name: Launch lite regression test
 
+permissions: read-all
+
 on:
   pull_request:
     branches:
diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml
index e41292535f..f16949c76c 100644
--- a/.github/workflows/smoke_test_participant.yml
+++ b/.github/workflows/smoke_test_participant.yml
@@ -16,6 +16,8 @@
 # License along with C-PAC. If not, see .
 name: Run participant smoke test
 
+permissions: read-all
+
 on:
   workflow_call:
   workflow_dispatch:

From 1e5f9af266e90d29a5ef799056d3a5b2bcd4728f Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 14:29:11 -0400
Subject: [PATCH 252/507] :bug: :construction_worker: Fix deleted branch name

---
 .github/workflows/delete_images.yml | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index ce259fd811..70efdb0eaf 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -1,5 +1,9 @@
 name: Delete development images
 
+permissions:
+  contents: read
+  packages: write
+
 on:
   delete:
 
@@ -14,6 +18,7 @@ jobs:
           - ''
           - lite
     env:
+      DELETED_BRANCH: ${{ github.event.ref }}
       GITHUB_TOKEN: ${{ secrets.API_PACKAGE_READ_DELETE }}
       IMAGE: c-pac
     steps:
@@ -32,7 +37,7 @@ jobs:
           then
             VARIANT=-${{ inputs.variant }}
           fi
-          TAG=${GITHUB_REF_NAME}
+          TAG=${DELETED_BRANCH//\//_}
          TAG=$TAG$VARIANT
          VERSION_ID=$(python .github/scripts/get_package_id.py $OWNER $IMAGE $TAG)

From 66c7f0ae6f9f78e60bf543f82ee6814074055b9f Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 15:49:25 -0400
Subject: [PATCH 253/507] :alembic: Use default GITHUB_TOKEN

---
 .github/workflows/delete_images.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index 70efdb0eaf..dbd4863b97 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -19,7 +19,7 @@ jobs:
           - lite
     env:
       DELETED_BRANCH: ${{ github.event.ref }}
-      GITHUB_TOKEN: ${{ secrets.API_PACKAGE_READ_DELETE }}
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       IMAGE: c-pac
     steps:
       - name: Check out C-PAC

From 3669836a37b2e0f0afa3a0d93b19d38c490769a6 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 16:04:21 -0400
Subject: [PATCH 254/507] :bug: Fix package ID call

---
 .github/scripts/get_package_id.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/scripts/get_package_id.py b/.github/scripts/get_package_id.py
index 0cc8e8aa57..d289f30418 100644
--- a/.github/scripts/get_package_id.py
+++ b/.github/scripts/get_package_id.py
@@ -83,6 +83,8 @@ def fetch(url):
                 ]
             )
         )
+    if isinstance(response, dict):
+        response = [response]
     return response
 
 _packages = fetch(

From d31864df87b2b7e9ba3293142ba2afeb78150fd1 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:03:27 -0400
Subject: [PATCH 255/507] :bug: Fix variant pointer

---
 .github/workflows/delete_images.yml | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index dbd4863b97..ba87689837 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -33,18 +33,19 @@ jobs:
           else
             OWNER_TYPE=org
           fi
-          if [[ "${{ inputs.variant }}" != "" ]]
+          if [[ "${{ matrix.variant }}" != "" ]]
           then
-            VARIANT=-${{ inputs.variant }}
+            VARIANT="-${{ matrix.variant }}"
           fi
           TAG=${DELETED_BRANCH//\//_}
           TAG=$TAG$VARIANT
           VERSION_ID=$(python .github/scripts/get_package_id.py $OWNER $IMAGE $TAG)
-          curl \
+          MESSAGE=$(curl \
             -u ${GITHUB_TOKEN}: \
             -X DELETE \
-            https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID}
+            https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID})
+          echo "${MESSAGE}"
       - name: Delete all containers from repository without tags
         uses: Chizkiyahu/delete-untagged-ghcr-action@v6
         with:

From 11f9c5f32ca04bf1835a940057721c087a563ae2 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:06:24 -0400
Subject: [PATCH 256/507] :construction_worker: Continue on error

---
 .github/workflows/delete_images.yml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index ba87689837..de95e8d22b 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -25,6 +25,7 @@ jobs:
       - name: Check out C-PAC
         uses: actions/checkout@v4
      - name: 'Delete branch image'
+        continue-on-error: true
        run: |
          OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d '/' -f 1)
          if [[ $(curl -u ${GITHUB_TOKEN}: https://api.github.com/users/${OWNER} | jq '.type') == '"User"' ]]
@@ -41,12 +42,12 @@ jobs:
           TAG=$TAG$VARIANT
           VERSION_ID=$(python .github/scripts/get_package_id.py $OWNER $IMAGE $TAG)
-          MESSAGE=$(curl \
+          curl \
             -u ${GITHUB_TOKEN}: \
             -X DELETE \
-            https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID})
-          echo "${MESSAGE}"
+            https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID}
       - name: Delete all containers from repository without tags
+        if: matrix.variant == ''
         uses: Chizkiyahu/delete-untagged-ghcr-action@v6
         with:
           token: ${GITHUB_TOKEN}

From 16a40a236deaced09a144fea03fa931e6a4bfef2 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:12:20 -0400
Subject: [PATCH 257/507] :construction_worker: Update delete package secret

---
 .github/workflows/delete_images.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index de95e8d22b..cfe0025741 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -19,7 +19,7 @@ jobs:
           - lite
     env:
       DELETED_BRANCH: ${{ github.event.ref }}
-      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      GITHUB_TOKEN: ${{ secrets.API_PACKAGE_READ_DELETE }}
       IMAGE: c-pac
     steps:
       - name: Check out C-PAC
@@ -50,7 +50,7 @@ jobs:
         if: matrix.variant == ''
         uses: Chizkiyahu/delete-untagged-ghcr-action@v6
         with:
-          token: ${GITHUB_TOKEN}
+          token: ${{ secrets.API_PACKAGE_READ_DELETE }}
           repository_owner: ${{ github.repository_owner }}
           repository: ${{ github.repository }}
           untagged_only: true
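Patches 252 through 257 iterate on one idea: resolve the container version ID matching the deleted branch's tag, then issue an authenticated DELETE against the GitHub Packages API. A rough Python equivalent of that curl flow, written as a sketch under assumptions (the owner/tag names are illustrative, and the token is assumed to carry package-delete scope):

    import os

    import requests

    OWNER_TYPE, OWNER, IMAGE, TAG = "orgs", "FCP-INDI", "c-pac", "my-branch_lite"  # illustrative
    headers = {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"}  # assumed delete-capable token

    # List versions of the container package, then find the one carrying our tag.
    versions = requests.get(
        f"https://api.github.com/{OWNER_TYPE}/{OWNER}/packages/container/{IMAGE}/versions",
        headers=headers,
    ).json()
    version_id = next(
        v["id"] for v in versions if TAG in v["metadata"]["container"]["tags"]
    )

    # Delete that single version, leaving other tags of the package intact.
    requests.delete(
        f"https://api.github.com/{OWNER_TYPE}/{OWNER}/packages/container/{IMAGE}/versions/{version_id}",
        headers=headers,
    )

This also shows why patch 254's `isinstance(response, dict)` guard matters: paginated fetches can collapse to a single object, and normalizing to a one-element list keeps the downstream `next(...)` search uniform.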
From bb03257230cbec7313fe6c4664123fa12e2137ca Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:14:38 -0400
Subject: [PATCH 258/507] :lock: Give initial script enough permissions for
 subscripts

---
 .github/workflows/build_C-PAC.yml            | 9 ++++++++-
 .github/workflows/build_and_test.yml         | 6 +++++-
 .github/workflows/deploy_to_Docker_Hub.yml   | 9 ++++++++-
 .github/workflows/on_push.yml                | 9 ++++++++-
 .github/workflows/regression_test_full.yml   | 9 ++++++++-
 .github/workflows/regression_test_lite.yml   | 9 ++++++++-
 .github/workflows/smoke_test_participant.yml | 9 ++++++++-
 7 files changed, 53 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/build_C-PAC.yml b/.github/workflows/build_C-PAC.yml
index 2dd3298d42..3ab3918a14 100644
--- a/.github/workflows/build_C-PAC.yml
+++ b/.github/workflows/build_C-PAC.yml
@@ -1,6 +1,13 @@
 name: Build C-PAC image
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   workflow_call:
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 4aef28211f..8e333296bd 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -17,9 +17,13 @@
 name: Build and test C-PAC
 
 permissions:
+  checks: write
   contents: read
+  deployments: write
+  issues: write
   packages: write
-  pull-requests: read
+  pull-requests: write
+  statuses: write
 
 on:
   workflow_call:
diff --git a/.github/workflows/deploy_to_Docker_Hub.yml b/.github/workflows/deploy_to_Docker_Hub.yml
index 1a284bb07c..f6cb13d13e 100644
--- a/.github/workflows/deploy_to_Docker_Hub.yml
+++ b/.github/workflows/deploy_to_Docker_Hub.yml
@@ -1,6 +1,13 @@
 name: Deploy to Docker Hub
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   workflow_call:
diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml
index f7a6d1db5c..8168ae245a 100644
--- a/.github/workflows/on_push.yml
+++ b/.github/workflows/on_push.yml
@@ -16,7 +16,14 @@
 # License along with C-PAC. If not, see .
 name: Build and test C-PAC
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   push:
diff --git a/.github/workflows/regression_test_full.yml b/.github/workflows/regression_test_full.yml
index beb494e0fd..75eafdf59a 100644
--- a/.github/workflows/regression_test_full.yml
+++ b/.github/workflows/regression_test_full.yml
@@ -1,6 +1,13 @@
 name: Run Regression Full Test
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   workflow_call:
diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml
index cad2157787..e3a05b43fd 100644
--- a/.github/workflows/regression_test_lite.yml
+++ b/.github/workflows/regression_test_lite.yml
@@ -1,6 +1,13 @@
 name: Launch lite regression test
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   pull_request:
diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml
index f16949c76c..dc4206d602 100644
--- a/.github/workflows/smoke_test_participant.yml
+++ b/.github/workflows/smoke_test_participant.yml
@@ -16,7 +16,14 @@
 # License along with C-PAC. If not, see .
 name: Run participant smoke test
 
-permissions: read-all
+permissions:
+  checks: write
+  contents: read
+  deployments: write
+  issues: write
+  packages: write
+  pull-requests: write
+  statuses: write
 
 on:
   workflow_call:

From 6d57901c979164470f5aa57dce95762006acdf4e Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:26:48 -0400
Subject: [PATCH 259/507] :construction_worker: Set OWNER_TYPE

---
 .github/workflows/delete_images.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index cfe0025741..958ec073d9 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -34,6 +34,7 @@ jobs:
           else
             OWNER_TYPE=org
           fi
+          echo "OWNER_TYPE=${OWNER_TYPE}" >> $GITHUB_ENV
           if [[ "${{ matrix.variant }}" != "" ]]
           then
             VARIANT="-${{ matrix.variant }}"
           fi
@@ -48,10 +49,13 @@ jobs:
             https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID}
       - name: Delete all containers from repository without tags
         if: matrix.variant == ''
+        env:
+          OWNER_TYPE: ${{ env.OWNER_TYPE }}
         uses: Chizkiyahu/delete-untagged-ghcr-action@v6
         with:
           token: ${{ secrets.API_PACKAGE_READ_DELETE }}
           repository_owner: ${{ github.repository_owner }}
           repository: ${{ github.repository }}
           untagged_only: true
-          owner_type: org
+          except_untagged_multiplatform: false
+          owner_type: ${{ env.OWNER_TYPE }}

From b8cd11cbe26ed6eb0d16ae81ecedf6d80c9315b3 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 16 Apr 2025 17:30:43 -0400
Subject: [PATCH 260/507] :necktie: Cap exported owner type at 4 chars

---
 .github/workflows/delete_images.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index 958ec073d9..e981581511 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -34,7 +34,7 @@ jobs:
           else
             OWNER_TYPE=org
           fi
-          echo "OWNER_TYPE=${OWNER_TYPE}" >> $GITHUB_ENV
+          echo "OWNER_TYPE=${OWNER_TYPE:0:4}" >> $GITHUB_ENV
           if [[ "${{ matrix.variant }}" != "" ]]
           then
             VARIANT="-${{ matrix.variant }}"

From c510636190be4d898378b3c8e04430b04f7dc70e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 17 Apr 2025 16:36:08 +0000
Subject: [PATCH 261/507] :arrow_up: Bump urllib3 from 1.26.18 to 1.26.19

Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.18 to 1.26.19.
- [Release notes](https://github.com/urllib3/urllib3/releases)
- [Changelog](https://github.com/urllib3/urllib3/blob/1.26.19/CHANGES.rst)
- [Commits](https://github.com/urllib3/urllib3/compare/1.26.18...1.26.19)

---
updated-dependencies:
- dependency-name: urllib3
  dependency-type: direct:production
...
Signed-off-by: dependabot[bot]
---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 2e3c085220..bb9f9b6c73 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -46,6 +46,6 @@ importlib-metadata==6.8.0
 lxml==4.9.2
 pip==23.3
 setuptools==70.0.0
-urllib3==1.26.18
+urllib3==1.26.19
 wheel==0.40.0
 zipp==3.19.1

From 9cca41092b2aef0573669d4b620f29a6baf56eff Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 25 Apr 2025 00:24:25 -0400
Subject: [PATCH 262/507] adding quick bold masking with automask

---
 CPAC/func_preproc/func_motion.py              | 18 +++++++++++++-----
 CPAC/func_preproc/func_preproc.py             |  2 +-
 CPAC/pipeline/cpac_pipeline.py                | 15 ++++++++-------
 .../configs/pipeline_config_abcd-options.yml  | 14 +++++++++++---
 4 files changed, 33 insertions(+), 16 deletions(-)

diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py
index bea7d2e29c..251187b938 100644
--- a/CPAC/func_preproc/func_motion.py
+++ b/CPAC/func_preproc/func_motion.py
@@ -87,11 +87,19 @@ def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None):
         gen_motion_stats,
         "inputspec.motion_correct",
     )
-    wf.connect(
-        *strat_pool.get_data("space-bold_desc-brain_mask"),
-        gen_motion_stats,
-        "inputspec.mask",
-    )
+
+    try :
+        wf.connect(*strat_pool.get_data("space-bold_desc-brain_mask"),
+            gen_motion_stats,
+            "inputspec.mask",
+        )
+    except :
+        mask_bold = pe.Node(interface=afni.MaskTool(), name=f"mask_bold_{pipe_num}")
+        mask_bold.inputs.outputtype = "NIFTI_GZ"
+        node, out = strat_pool.get_data("desc-preproc_bold")
+        wf.connect(node, out, mask_bold, "in_file")
+        wf.connect(mask_bold, "out_file", gen_motion_stats, "inputspec.mask")
+
     wf.connect(
         *strat_pool.get_data("desc-movementParameters_motion"),
         gen_motion_stats,
diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index 00a3368722..0204ede715 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -1611,7 +1611,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None):
     Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_.
     """
-    anat_brain_to_func_res = anat_brain_to_bold_res(wf_name="anat_brain_to_bold_res", cfg, pipe_num)
+    anat_brain_to_func_res = anat_brain_to_bold_res(wf_name="anat_brain_to_bold_res", cfg=cfg, pipe_num=pipe_num)
 
     node, out = strat_pool.get_data("space-template_desc-preproc_T1w")
     wf.connect(
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index 6a4a1f90ba..6e4d24c27d 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -99,6 +99,7 @@
     bold_mask_fsl,
     bold_mask_fsl_afni,
     bold_masking,
+    template_space_bold_masking,
     func_despike,
     func_despike_template,
     func_mean,
@@ -1401,13 +1402,6 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
     if cfg.voxel_mirrored_homotopic_connectivity["run"]:
         pipeline_blocks += [create_func_to_T1template_symmetric_xfm]
 
-    # Template space functional masking
-    if cfg.functional_preproc["template_space_func_masking"]["run"]:
-        if not rpool.check_rpool("space-template_desc-bold_mask"):
-            pipeline_blocks += bold_mask_anatomical_resampled,
-        if cfg.functional_preproc["template_space_func_masking"]["apply_func_mask_in_template_space"]:
-            pipeline_blocks += apply_func_mask_to_template,
-
     # Nuisance Correction
     generate_only = (
         True not in cfg["nuisance_corrections", "2-nuisance_regression", "run"]
@@ -1518,6 +1512,13 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
             warp_bold_mask_to_EPItemplate,
             warp_deriv_mask_to_EPItemplate,
         ]
+
+    # Template space functional masking
+    if cfg.functional_preproc["template_space_func_masking"]["run"]:
+        if not rpool.check_rpool("space-template_desc-bold_mask"):
+            pipeline_blocks += bold_mask_anatomical_resampled,
+        if cfg.functional_preproc["template_space_func_masking"]["apply_func_mask_in_template_space"]:
+            pipeline_blocks += template_space_bold_masking,
 
     # Template-space nuisance regression
     nuisance_template = (
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 5f1f1a855b..e38eff290c 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -210,11 +210,16 @@ registration_workflows:
       # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference']
       input: [Selected_Functional_Volume]
 
+      # Mask the sbref created by coregistration input prep nodeblocks above before registration
+      mask_sbref: Off
+
       # Choose coregistration interpolation
       interpolation: spline
 
       # Choose coregistration degree of freedom
       dof: 12
+
+
   func_registration_to_template:
@@ -311,7 +316,7 @@ functional_preproc:
 
 nuisance_corrections:
   2-nuisance_regression:
-
+    run: Off
     # Select which nuisance signal corrections to apply
     Regressors:
       - Name: default
         Motion:
           include_delayed: On
           include_delayed_squared: On
           include_squared: On
 
+    # switch to Off if nuisance regression is off and you don't want to write out the regressors
+    create_regressors: Off
+
   # Process and refine masks used to produce regressors and time series for
   # regression.
   regressor_masks:
@@ -380,10 +388,10 @@ regional_homogeneity:
 # -----------------------
 post_processing:
   spatial_smoothing:
-    run: On
+    run: Off
 
   z-scoring:
-    run: On
+    run: Off
 
 seed_based_correlation_analysis:

From fbc6abf328e7eee115a7a3b7a27e95a0fbd98b16 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 25 Apr 2025 01:09:11 -0400
Subject: [PATCH 263/507] tweaking the switch to match the new nodeblock

---
 CPAC/func_preproc/func_preproc.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index 0204ede715..bc53f9e49d 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -1592,9 +1592,9 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num):
     name="bold_mask_anatomical_resampled",
     switch=[
         ["functional_preproc", "run"],
-        ["functional_preproc", "func_masking", "run"],
+        ["functional_preproc", "template_space_func_masking", "run"],
     ],
-    option_key=["functional_preproc", "func_masking", "using"],
+    option_key=["functional_preproc", "template_space_func_masking", "using"],
     option_val="Anatomical_Resampled",
     inputs=[
         "T1w-template-funcreg",

From 992e01c84b6e669acdba8f72014b2bb326c65dd6 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 25 Apr 2025 05:09:39 +0000
Subject: [PATCH 264/507] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 CPAC/func_preproc/func_motion.py              |  9 +++++----
 CPAC/func_preproc/func_preproc.py             | 18 ++++++++++++++----
 CPAC/pipeline/cpac_pipeline.py                | 10 ++++++----
 .../configs/pipeline_config_abcd-options.yml  |  4 ++--
 .../configs/pipeline_config_abcd-prep.yml     |  2 +-
 .../configs/pipeline_config_blank.yml         |  4 ++--
 6 files changed, 30 insertions(+), 17 deletions(-)

diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py
index 251187b938..4268786ea4 100644
--- a/CPAC/func_preproc/func_motion.py
+++ b/CPAC/func_preproc/func_motion.py
@@ -88,17 +88,18 @@ def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None):
         "inputspec.motion_correct",
     )
 
-    try :
-        wf.connect(*strat_pool.get_data("space-bold_desc-brain_mask"),
+    try:
+        wf.connect(
+            *strat_pool.get_data("space-bold_desc-brain_mask"),
             gen_motion_stats,
             "inputspec.mask",
         )
-    except :
+    except:
         mask_bold = pe.Node(interface=afni.MaskTool(), name=f"mask_bold_{pipe_num}")
         mask_bold.inputs.outputtype = "NIFTI_GZ"
         node, out = strat_pool.get_data("desc-preproc_bold")
         wf.connect(node, out, mask_bold, "in_file")
-        wf.connect(mask_bold, "out_file", gen_motion_stats, "inputspec.mask")
+        wf.connect(mask_bold, "out_file", gen_motion_stats, "inputspec.mask")
 
     wf.connect(
         *strat_pool.get_data("desc-movementParameters_motion"),
         gen_motion_stats,
diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index bc53f9e49d..c5f535b5c5 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -1611,7 +1611,9 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None):
     Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_.
""" - anat_brain_to_func_res = anat_brain_to_bold_res(wf_name="anat_brain_to_bold_res", cfg=cfg, pipe_num=pipe_num) + anat_brain_to_func_res = anat_brain_to_bold_res( + wf_name="anat_brain_to_bold_res", cfg=cfg, pipe_num=pipe_num + ) node, out = strat_pool.get_data("space-template_desc-preproc_T1w") wf.connect( @@ -1836,12 +1838,17 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): return (wf, outputs) + @nodeblock( name="template_space_bold_masking", switch=[ ["functional_preproc", "run"], ["functional_preproc", "template_space_func_masking", "run"], - ["functional_preproc", "template_space_func_masking", "apply_func_mask_in_template_space"], + [ + "functional_preproc", + "template_space_func_masking", + "apply_func_mask_in_template_space", + ], ], inputs=[("space-template_desc-preproc_bold", "space-template_desc-bold_mask")], outputs={ @@ -1862,13 +1869,16 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): def template_space_bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): """Mask the bold in template space.""" func_edge_detect = pe.Node( - interface=afni_utils.Calc(), name=f"template_space_func_extract_brain_{pipe_num}" + interface=afni_utils.Calc(), + name=f"template_space_func_extract_brain_{pipe_num}", ) func_edge_detect.inputs.expr = "a*b" func_edge_detect.inputs.outputtype = "NIFTI_GZ" - node_head_bold, out_head_bold = strat_pool.get_data("space-template_desc-preproc_bold") + node_head_bold, out_head_bold = strat_pool.get_data( + "space-template_desc-preproc_bold" + ) wf.connect(node_head_bold, out_head_bold, func_edge_detect, "in_file_a") node, out = strat_pool.get_data("space-template_desc-bold_mask") diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 6e4d24c27d..aaa7d4662d 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1512,13 +1512,15 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): warp_bold_mask_to_EPItemplate, warp_deriv_mask_to_EPItemplate, ] - + # Template space functional masking if cfg.functional_preproc["template_space_func_masking"]["run"]: if not rpool.check_rpool("space-template_desc-bold_mask"): - pipeline_blocks += bold_mask_anatomical_resampled, - if cfg.functional_preproc["template_space_func_masking"]["apply_func_mask_in_template_space"]: - pipeline_blocks += template_space_bold_masking, + pipeline_blocks += (bold_mask_anatomical_resampled,) + if cfg.functional_preproc["template_space_func_masking"][ + "apply_func_mask_in_template_space" + ]: + pipeline_blocks += (template_space_bold_masking,) # Template-space nuisance regression nuisance_template = ( diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index e38eff290c..2e95477e05 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -218,7 +218,7 @@ registration_workflows: # Choose coregistration degree of freedom dof: 12 - + func_registration_to_template: @@ -331,7 +331,7 @@ nuisance_corrections: # switch to Off if nuisance regression is off and you don't want to write out the regressors create_regressors: Off - + # Process and refine masks used to produce regressors and time series for # regression. 
regressor_masks: diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index aef9718952..5cd97c6d38 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -233,4 +233,4 @@ functional_preproc: # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") using: [Anatomical_Resampled] - apply_func_mask_in_template_space: On \ No newline at end of file + apply_func_mask_in_template_space: On diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 0d48fbf0ca..18db947205 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1150,14 +1150,14 @@ functional_preproc: # Choose whether or not to dilate the anatomical mask if you choose 'Anatomical_Refined' as the functional masking option. It will dilate one voxel if enabled. anatomical_mask_dilation: Off - + template_space_func_masking: run: Off # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") using: [Anatomical_Resampled] apply_func_mask_in_template_space: Off - + generate_func_mean: # Generate mean functional image From e49988f166fb76d322088cc4e30af78d92ed979a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 25 Apr 2025 11:51:13 -0400 Subject: [PATCH 265/507] :bug: Update outputs for `apply_blip_to_timeseries_separately` --- CPAC/registration/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 258cb9712d..5b5d7493a9 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3770,7 +3770,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= "fsl-blip-warp", ) ], - outputs=["desc-preproc_bold", "desc-stc_bold", "bold"], + outputs=["desc-preproc_bold", "desc-reorient_bold", "desc-stc_bold"], ) def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): """Apply blip to timeseries.""" From 3e95ba347a0c0f649737abf5e1440268d08d6055 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 25 Apr 2025 13:14:27 -0400 Subject: [PATCH 266/507] using afni.Automask instead of MaskTool --- CPAC/func_preproc/func_motion.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 4268786ea4..8be60075ed 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -88,18 +88,20 @@ def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None): "inputspec.motion_correct", ) - try: + if strat_pool.check_rpool("space-bold_desc-brain_mask"): 
wf.connect( *strat_pool.get_data("space-bold_desc-brain_mask"), gen_motion_stats, "inputspec.mask", ) - except: - mask_bold = pe.Node(interface=afni.MaskTool(), name=f"mask_bold_{pipe_num}") - mask_bold.inputs.outputtype = "NIFTI_GZ" + else: + automask = pe.Node(interface=afni.Automask(), name=f"automask_bold_{pipe_num}") + automask.inputs.dilate = 1 + automask.inputs.outputtype = "NIFTI_GZ" + node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, mask_bold, "in_file") - wf.connect(mask_bold, "out_file", gen_motion_stats, "inputspec.mask") + wf.connect(node, out, automask, "in_file") + wf.connect(automask, "out_file", gen_motion_stats, "inputspec.mask") wf.connect( *strat_pool.get_data("desc-movementParameters_motion"), From 860cfde1191b924c6277aabd08b8d9e309627ddd Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 25 Apr 2025 15:18:27 -0400 Subject: [PATCH 267/507] adding to changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 078114399e..9e1831eb45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Turned `generate_xcpqc_files` on for all preconfigurations except `blank`. - Introduced specific switch `restore_t1w_intensity` for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config. - Updated GitHub Actions to run automated integration and regression tests on HPC. +- Refactored `bold_mask_anatomical_resampled` nodeblock and related pipeline configs: + - Limited scope to template-space masking only. + - Removed broken support for native-space masking. + - Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods. + - Moved `Anatomical_Resampled` masking method from `func_masking` to the `template_space_func_masking`. ### Fixed From e371bd1f5727d60373c473523d678ec4dfdd6f5e Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 25 Apr 2025 15:25:03 -0400 Subject: [PATCH 268/507] Reverting few custom changes used for testing --- CPAC/resources/configs/pipeline_config_abcd-options.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 2e95477e05..318b817f1c 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -316,7 +316,6 @@ functional_preproc: nuisance_corrections: 2-nuisance_regression: - run: Off # Select which nuisance signal corrections to apply Regressors: - Name: default @@ -329,9 +328,6 @@ nuisance_corrections: include_delayed_squared: On include_squared: On - # switch to Off if nuisance regression is off and you don't want to write out the regressors - create_regressors: Off - # Process and refine masks used to produce regressors and time series for # regression. 
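The fallback that patches 262 through 266 converge on can be exercised on its own: AFNI's 3dAutomask derives a brain mask directly from the EPI when no anatomically derived mask is in the resource pool. A minimal standalone sketch; the input path is hypothetical:

    import nipype.pipeline.engine as pe
    from nipype.interfaces import afni

    automask = pe.Node(interface=afni.Automask(), name="automask_bold")
    automask.inputs.dilate = 1  # dilate the mask by one voxel, matching the nodeblock
    automask.inputs.outputtype = "NIFTI_GZ"
    automask.inputs.in_file = "desc-preproc_bold.nii.gz"  # hypothetical path
    result = automask.run()  # result.outputs.out_file is the EPI-derived brain mask

Replacing the earlier bare try/except with an explicit `check_rpool` test also removes a real hazard: the except clause would have silently swallowed any connection error, not just a missing mask.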
From 860cfde1191b924c6277aabd08b8d9e309627ddd Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 25 Apr 2025 15:18:27 -0400
Subject: [PATCH 267/507] adding to changelog

---
 CHANGELOG.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 078114399e..9e1831eb45 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -46,6 +46,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Turned `generate_xcpqc_files` on for all preconfigurations except `blank`.
 - Introduced specific switch `restore_t1w_intensity` for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config.
 - Updated GitHub Actions to run automated integration and regression tests on HPC.
+- Refactored `bold_mask_anatomical_resampled` nodeblock and related pipeline configs:
+  - Limited scope to template-space masking only.
+  - Removed broken support for native-space masking.
+  - Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods.
+  - Moved the `Anatomical_Resampled` masking method from `func_masking` to the new `template_space_func_masking` section.
 
 ### Fixed

From e371bd1f5727d60373c473523d678ec4dfdd6f5e Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 25 Apr 2025 15:25:03 -0400
Subject: [PATCH 268/507] Reverting a few custom changes used for testing

---
 CPAC/resources/configs/pipeline_config_abcd-options.yml | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 2e95477e05..318b817f1c 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -316,7 +316,6 @@ functional_preproc:
 
 nuisance_corrections:
   2-nuisance_regression:
-    run: Off
     # Select which nuisance signal corrections to apply
     Regressors:
       - Name: default
         Motion:
           include_delayed: On
           include_delayed_squared: On
           include_squared: On
 
-    # switch to Off if nuisance regression is off and you don't want to write out the regressors
-    create_regressors: Off
-
   # Process and refine masks used to produce regressors and time series for
   # regression.
   regressor_masks:
@@ -388,10 +384,10 @@ regional_homogeneity:
 # -----------------------
 post_processing:
   spatial_smoothing:
-    run: Off
+    run: On
 
   z-scoring:
-    run: Off
+    run: On
 
 seed_based_correlation_analysis:

From 2f91c23777cd263d409aaa0ad1c704e560ff1e92 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 29 Apr 2025 10:40:00 -0400
Subject: [PATCH 269/507] :bug: Fix `TypeError: unsupported operand type(s) for
 -: 'NoneType' and 'NoneType'`

---
 CPAC/utils/monitoring/draw_gantt_chart.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py
index 089e9fdd39..67f26693f9 100644
--- a/CPAC/utils/monitoring/draw_gantt_chart.py
+++ b/CPAC/utils/monitoring/draw_gantt_chart.py
@@ -401,7 +401,7 @@ def generate_gantt_chart(
         return
 
     for node in nodes_list:
-        if "duration" not in node:
+        if "duration" not in node and (node["start"] and node["finish"]):
             node["duration"] = (node["finish"] - node["start"]).total_seconds()
 
     # Create the header of the report with useful information
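The guard in patch 269 is worth spelling out: a node record that never started or never finished carries None for those timestamps, and subtracting None from None is exactly the TypeError named in the subject line. A small self-contained sketch of the defensive pattern, using a hand-built record:

    from datetime import datetime

    node = {"start": datetime(2025, 4, 29, 10, 0), "finish": None}  # e.g. a crashed node

    if "duration" not in node and (node["start"] and node["finish"]):
        node["duration"] = (node["finish"] - node["start"]).total_seconds()
    # records missing either timestamp simply never gain a "duration" key

The truthiness test works here because datetime objects are always truthy, so the parenthesized condition is effectively a both-timestamps-present check.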
From eb3cbbbc9ef8505782ecbde71ac535bc8bf3ba20 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Tue, 29 Apr 2025 12:59:12 -0400
Subject: [PATCH 270/507] changes made to resolve confusing namings after
 Jon's review

---
 CPAC/func_preproc/func_preproc.py             | 19 +++++++------------
 CPAC/pipeline/cpac_pipeline.py                | 14 ++++++++------
 .../configs/pipeline_config_abcd-options.yml  |  2 --
 .../configs/pipeline_config_abcd-prep.yml     |  2 --
 .../configs/pipeline_config_blank.yml         |  2 --
 .../configs/pipeline_config_default.yml       |  2 --
 6 files changed, 15 insertions(+), 26 deletions(-)

diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index c5f535b5c5..34a2b73771 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -1844,11 +1844,6 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
     switch=[
         ["functional_preproc", "run"],
         ["functional_preproc", "template_space_func_masking", "run"],
-        [
-            "functional_preproc",
-            "template_space_func_masking",
-            "apply_func_mask_in_template_space",
-        ],
     ],
     inputs=[("space-template_desc-preproc_bold", "space-template_desc-bold_mask")],
     outputs={
@@ -1868,25 +1863,25 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
 )
 def template_space_bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
     """Mask the bold in template space."""
-    func_edge_detect = pe.Node(
+    func_apply_mask = pe.Node(
         interface=afni_utils.Calc(),
         name=f"template_space_func_extract_brain_{pipe_num}",
     )
 
-    func_edge_detect.inputs.expr = "a*b"
-    func_edge_detect.inputs.outputtype = "NIFTI_GZ"
+    func_apply_mask.inputs.expr = "a*b"
+    func_apply_mask.inputs.outputtype = "NIFTI_GZ"
 
     node_head_bold, out_head_bold = strat_pool.get_data(
         "space-template_desc-preproc_bold"
     )
-    wf.connect(node_head_bold, out_head_bold, func_edge_detect, "in_file_a")
+    wf.connect(node_head_bold, out_head_bold, func_apply_mask, "in_file_a")
 
     node, out = strat_pool.get_data("space-template_desc-bold_mask")
-    wf.connect(node, out, func_edge_detect, "in_file_b")
+    wf.connect(node, out, func_apply_mask, "in_file_b")
 
     outputs = {
-        "space-template_desc-preproc_bold": (func_edge_detect, "out_file"),
-        "space-template_desc-brain_bold": (func_edge_detect, "out_file"),
+        "space-template_desc-preproc_bold": (func_apply_mask, "out_file"),
+        "space-template_desc-brain_bold": (func_apply_mask, "out_file"),
         "space-template_desc-head_bold": (node_head_bold, out_head_bold),
     }
 
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index aaa7d4662d..bb4617a8de 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -99,7 +99,6 @@
     bold_mask_fsl,
     bold_mask_fsl_afni,
     bold_masking,
-    template_space_bold_masking,
     func_despike,
     func_despike_template,
     func_mean,
@@ -108,6 +107,7 @@
     func_scaling,
     func_slice_time,
     func_truncate,
+    template_space_bold_masking,
 )
 from CPAC.network_centrality.pipeline import network_centrality
 from CPAC.nuisance.nuisance import (
@@ -1516,11 +1516,13 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
     # Template space functional masking
     if cfg.functional_preproc["template_space_func_masking"]["run"]:
         if not rpool.check_rpool("space-template_desc-bold_mask"):
-            pipeline_blocks += (bold_mask_anatomical_resampled,)
-        if cfg.functional_preproc["template_space_func_masking"][
-            "apply_func_mask_in_template_space"
-        ]:
-            pipeline_blocks += (template_space_bold_masking,)
+            pipeline_blocks += [
+                bold_mask_anatomical_resampled,
+            ]
+
+        pipeline_blocks += [
+            template_space_bold_masking,
+        ]
 
     # Template-space nuisance regression
     nuisance_template = (
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 318b817f1c..a77944ce60 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -302,8 +302,6 @@ functional_preproc:
     # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs")
     using: [Anatomical_Resampled]
 
-    apply_func_mask_in_template_space: On
-
   generate_func_mean:
 
     # Generate mean functional image
diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
index 5cd97c6d38..033232ffd9 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
@@ -232,5 +232,3 @@ functional_preproc:
     run: On
 
     # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs")
     using: [Anatomical_Resampled]
-
-    apply_func_mask_in_template_space: On
diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml
index 18db947205..8579ce1f6c 100644
--- a/CPAC/resources/configs/pipeline_config_blank.yml
+++ b/CPAC/resources/configs/pipeline_config_blank.yml
@@ -1156,8 +1156,6 @@ functional_preproc:
     # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs")
     using: [Anatomical_Resampled]
 
-    apply_func_mask_in_template_space: Off
-
   generate_func_mean:
 
     # Generate mean functional image
diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml
index c937526a79..97118bf2b7 100644
--- a/CPAC/resources/configs/pipeline_config_default.yml
+++ b/CPAC/resources/configs/pipeline_config_default.yml
@@ -1338,8 +1338,6 @@ functional_preproc:
     # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs")
     using: [Anatomical_Resampled]
 
-    apply_func_mask_in_template_space: Off
-
   generate_func_mean:
 
     # Generate mean functional image

From 6d8da2be9d9f6e31885aad7f540057ccbed4b07f Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Tue, 29 Apr 2025 13:09:13 -0400
Subject: [PATCH 271/507] removing schema validation for removed switch

---
 CPAC/pipeline/schema.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index a4b548148a..83012523da 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -1013,7 +1013,6 @@ def sanitize(filename):
         "template_space_func_masking": {
             "run": bool1_1,
             "using": [In({"Anatomical_Resampled"})],
-            "apply_func_mask_in_template_space": bool1_1,
         },
         "generate_func_mean": {
             "run": bool1_1,

From 9a1834bf503f65379db68f783056649fbb5b18bf Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 22 Apr 2025 15:04:47 -0400
Subject: [PATCH 272/507] :construction_worker: Report coverage to codecov
 instead of Circle artifact

---
 .circleci/main.yml | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/.circleci/main.yml b/.circleci/main.yml
index 4dfc06f738..e202325582 100644
--- a/.circleci/main.yml
+++ b/.circleci/main.yml
@@ -2,6 +2,9 @@ version: 2.1
 
 # Singularity started failing to set up on Circle circa May 2023, so those tests are currently disabled
 
+orbs:
+  codecov: codecov/codecov@5
+
 parameters:
   branch:
     type: string
@@ -36,7 +39,8 @@ commands:
         name: "Combining and reporting coverage"
        command: |
          coverage combine
-          coverage html --ignore-errors
+          coverage xml -o coverage.xml  # Generate XML report
+      - codecov/upload
   configure-git-user:
     steps:
       - add_ssh_keys:
@@ -185,8 +189,6 @@ jobs:
       #       key: coverage-singularity-lite-{{ .Revision }}
       - set-python-version
       - combine-coverage
-      - store_artifacts:
-          path: htmlcov
   push-branch-to-docker-hub:
     parameters:
       variant:
"setuptools.build_meta" + +[tool.coverage.paths] +source = [ + "/code", + "/home/circleci/project" +] + +[tool.coverage.report] +ignore_errors = true +include_namespace_packages = true +skip_empty = true + +[tool.coverage.run] +branch = true +relative_files = true +source = [ + "CPAC", + "dev/circleci_data" +] From dffb1c5686855225b16f9ed1a8020e2e1a203083 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 23 Apr 2025 14:19:15 -0400 Subject: [PATCH 274/507] :wrench: :construction_worker: Configure codecov --- .codecov.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .codecov.yml diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 0000000000..423aa156d0 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,18 @@ +comment: # this is a top-level key + layout: "diff, files" + behavior: default + require_changes: false # learn more in the Requiring Changes section below + require_base: false # [true :: must have a base report to post] + require_head: true # [true :: must have a head report to post] + hide_project_coverage: false # [true :: only show coverage on the git diff] + +coverage: + precision: 1 + range: "50..100" + round: nearest + status: + project: + default: # default is the status check's name, not default settings + only_pulls: false + target: auto + threshold: "5" From b11eb67ef705d14543c72460da86a2b1e264a07c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 23 Apr 2025 14:22:33 -0400 Subject: [PATCH 275/507] :memo: Add coverage badge to README --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index c320755101..f8501ba049 100644 --- a/README.md +++ b/README.md @@ -9,8 +9,7 @@ C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANT You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . 
--> C-PAC: Configurable Pipeline for the Analysis of Connectomes ============================================================ -[![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) - +[![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) [![codecov](https://codecov.io/github/FCP-INDI/C-PAC/graph/badge.svg?token=sWxXoDRf1M)](https://codecov.io/github/FCP-INDI/C-PAC) [![LGPL](https://www.gnu.org/graphics/lgplv3-88x31.png)](./COPYING.LESSER) From 00ca95800204ec365a708ac51f52c0d043f4e5e4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 24 Apr 2025 10:47:31 -0400 Subject: [PATCH 276/507] :bulb: Remove extra comments --- .codecov.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index 423aa156d0..ec04ab49f2 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,10 +1,10 @@ -comment: # this is a top-level key +comment: layout: "diff, files" behavior: default - require_changes: false # learn more in the Requiring Changes section below - require_base: false # [true :: must have a base report to post] - require_head: true # [true :: must have a head report to post] - hide_project_coverage: false # [true :: only show coverage on the git diff] + require_changes: false + require_base: false + require_head: true + hide_project_coverage: false coverage: precision: 1 From 054a04b70ef99e6ea1c80d437e95406d120e1bf2 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 24 Apr 2025 10:57:24 -0400 Subject: [PATCH 277/507] :wrench: Specify main branch for codecov reporting --- .codecov.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.codecov.yml b/.codecov.yml index ec04ab49f2..ee4af5c69c 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,3 +1,6 @@ +codecov: + branch: main + comment: layout: "diff, files" behavior: default From 310d427e43f415e9c636a387b628a79a88c38af5 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 24 Apr 2025 11:01:56 -0400 Subject: [PATCH 278/507] :wrench: Adjust coverage target range --- .codecov.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.codecov.yml b/.codecov.yml index ee4af5c69c..44836c40ce 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -11,7 +11,7 @@ comment: coverage: precision: 1 - range: "50..100" + range: "50..90" round: nearest status: project: From b4e3488508020a13b3a9ab16334b6e67dbfbde5f Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 25 Apr 2025 13:49:26 -0400 Subject: [PATCH 279/507] :bug: Decode bytestrings before coercing types --- CPAC/utils/utils.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 69b96be4ca..6ca7e25689 100644 --- 
a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -525,6 +525,9 @@ def check(self, val_to_check: str, throw_exception: bool): msg = f"Missing value for {val_to_check} for participant {self.subject}." raise ValueError(msg) + if isinstance(ret_val, bytes): + ret_val = ret_val.decode("utf-8") + return ret_val @overload @@ -631,6 +634,8 @@ def fetch_and_convert( f" ≅ '{matched_keys[1]}'." ) if convert_to: + if isinstance(raw_value, bytes): + raw_value = raw_value.decode("utf-8") try: value = convert_to(raw_value) except (TypeError, ValueError): @@ -2634,8 +2639,6 @@ def _replace_in_value_list(current_value, replacement_tuple): def flip_orientation_code(code): - """ - Reverts an orientation code by flipping R↔L, A↔P, and I↔S. - """ + """Reverts an orientation code by flipping R↔L, A↔P, and I↔S.""" flip_dict = {"R": "L", "L": "R", "A": "P", "P": "A", "I": "S", "S": "I"} return "".join(flip_dict[c] for c in code) From 94bff7c89c3a5ef2d7314df4078a238684be121e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 28 Apr 2025 16:35:59 -0400 Subject: [PATCH 280/507] :bug: Restore header to `censors.tsv` --- CPAC/nuisance/utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py index 9067b72ab6..db6667dcb3 100644 --- a/CPAC/nuisance/utils/utils.py +++ b/CPAC/nuisance/utils/utils.py @@ -139,7 +139,7 @@ def find_offending_time_points( censor_vector[extended_censors] = 0 out_file_path = os.path.join(os.getcwd(), "censors.tsv") - np.savetxt(out_file_path, censor_vector, fmt="%d", comments="") + np.savetxt(out_file_path, censor_vector, fmt="%d", header="censor", comments="") return out_file_path From 1a16e415bd2a87cabc2b03c57bb48b330d6a4626 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 29 Apr 2025 14:56:23 -0400 Subject: [PATCH 281/507] :recycle: Modernize test --- CPAC/nuisance/tests/test_utils.py | 44 +++++++++++++++++++++---------- 1 file changed, 30 insertions(+), 14 deletions(-) diff --git a/CPAC/nuisance/tests/test_utils.py b/CPAC/nuisance/tests/test_utils.py index be0ea03e96..c853259248 100644 --- a/CPAC/nuisance/tests/test_utils.py +++ b/CPAC/nuisance/tests/test_utils.py @@ -1,8 +1,26 @@ +# Copyright (C) 2019-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
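The idiom patch 279 applies in two places generalizes: values read back from serialized sources can surface as bytes, and built-in coercions reject them outright (`float(b"2.5")` raises TypeError in Python 3), so decode before converting. A compact sketch of the pattern; the helper name is hypothetical:

    def coerce(raw, convert_to=float):
        """Decode bytestrings before type coercion."""
        if isinstance(raw, bytes):
            raw = raw.decode("utf-8")
        return convert_to(raw)

    assert coerce(b"2.5") == coerce("2.5") == 2.5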
+"""Test nuisance utilities.""" + +from importlib.resources import as_file, files import os import tempfile import numpy as np -import pkg_resources as p import pytest from CPAC.nuisance.utils import calc_compcor_components, find_offending_time_points @@ -10,24 +28,22 @@ logger = getLogger("CPAC.nuisance.tests") -mocked_outputs = p.resource_filename( - "CPAC", os.path.join("nuisance", "tests", "motion_statistics") -) +_mocked_outputs = files("CPAC").joinpath("nuisance/tests/motion_statistics") @pytest.mark.skip(reason="needs refactoring") def test_find_offending_time_points(): dl_dir = tempfile.mkdtemp() os.chdir(dl_dir) - - censored = find_offending_time_points( - os.path.join(mocked_outputs, "FD_J.1D"), - os.path.join(mocked_outputs, "FD_P.1D"), - os.path.join(mocked_outputs, "DVARS.1D"), - 2.0, - 2.0, - "1.5SD", - ) + with as_file(_mocked_outputs) as mocked_outputs: + censored = find_offending_time_points( + str(mocked_outputs / "FD_J.1D"), + str(mocked_outputs / "FD_P.1D"), + str(mocked_outputs / "DVARS.1D"), + 2.0, + 2.0, + "1.5SD", + ) censored = np.loadtxt(censored).astype(bool) @@ -41,4 +57,4 @@ def test_calc_compcor_components(): compcor_filename = calc_compcor_components(data_filename, 5, mask_filename) logger.info("compcor components written to %s", compcor_filename) - assert 0 == 1 + From 421d868c6595b505ba04c712e1f1deb94d48654d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 29 Apr 2025 15:27:50 -0400 Subject: [PATCH 282/507] :recycle: Skip header when loading TSV into NumPy Ref https://github.com/FCP-INDI/C-PAC/pull/2228#discussion_r2067035438, https://github.com/FCP-INDI/C-PAC/pull/2228#discussion_r2067046337 Co-authored-by: Biraj Shrestha <111654544+birajstha@users.noreply.github.com> --- CPAC/nuisance/nuisance.py | 11 +++++++---- CPAC/nuisance/tests/test_utils.py | 25 ++++++++++++++++++++++++- CPAC/nuisance/utils/__init__.py | 4 +++- CPAC/nuisance/utils/utils.py | 19 ++++++++++++++++++- 4 files changed, 52 insertions(+), 7 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index ce4c1298da..d7b78fc6e6 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Nusiance regression.""" + import os from typing import Literal @@ -29,6 +31,7 @@ from CPAC.nuisance.utils import ( find_offending_time_points, generate_summarize_tissue_mask, + load_censor_tsv, temporal_variance_mask, ) from CPAC.nuisance.utils.compcor import ( @@ -302,7 +305,7 @@ def gather_nuisance( raise ValueError(msg) try: - regressors = np.loadtxt(regressor_file) + regressors = load_censor_tsv(regressor_file, regressor_length) except (OSError, TypeError, UnicodeDecodeError, ValueError) as error: msg = f"Could not read regressor {regressor_type} from {regressor_file}." 
raise OSError(msg) from error @@ -382,7 +385,7 @@ def gather_nuisance( if custom_file_paths: for custom_file_path in custom_file_paths: try: - custom_regressor = np.loadtxt(custom_file_path) + custom_regressor = load_censor_tsv(custom_file_path, regressor_length) except: msg = "Could not read regressor {0} from {1}.".format( "Custom", custom_file_path @@ -421,7 +424,7 @@ def gather_nuisance( censor_volumes = np.ones((regressor_length,), dtype=int) else: try: - censor_volumes = np.loadtxt(regressor_file) + censor_volumes = load_censor_tsv(regressor_file, regressor_length) except: msg = ( f"Could not read regressor {regressor_type} from {regressor_file}." diff --git a/CPAC/nuisance/tests/test_utils.py b/CPAC/nuisance/tests/test_utils.py index c853259248..24bbc0660e 100644 --- a/CPAC/nuisance/tests/test_utils.py +++ b/CPAC/nuisance/tests/test_utils.py @@ -18,12 +18,18 @@ from importlib.resources import as_file, files import os +from pathlib import Path +from random import randint import tempfile import numpy as np import pytest -from CPAC.nuisance.utils import calc_compcor_components, find_offending_time_points +from CPAC.nuisance.utils import ( + calc_compcor_components, + find_offending_time_points, + load_censor_tsv, +) from CPAC.utils.monitoring.custom_logging import getLogger logger = getLogger("CPAC.nuisance.tests") @@ -58,3 +64,20 @@ def test_calc_compcor_components(): compcor_filename = calc_compcor_components(data_filename, 5, mask_filename) logger.info("compcor components written to %s", compcor_filename) + +@pytest.mark.parametrize("header", [True, False]) +def test_load_censor_tsv(header: bool, tmp_path: Path) -> None: + """Test loading of censor tsv files with and without headers.""" + expected_length = 3 + filepath = tmp_path / "censor.tsv" + with filepath.open("w") as f: + if header: + f.write("censor\n") + for i in range(expected_length): + f.write(f"{randint(0, 1)}\n") + censors = load_censor_tsv(str(filepath), expected_length) + assert ( + censors.shape[0] == expected_length + ), "Length of censors does not match expected length" + with pytest.raises(ValueError, match="expected length"): + load_censor_tsv(str(filepath), expected_length + 1) diff --git a/CPAC/nuisance/utils/__init__.py b/CPAC/nuisance/utils/__init__.py index 4fa7decfb9..dc18a10e16 100644 --- a/CPAC/nuisance/utils/__init__.py +++ b/CPAC/nuisance/utils/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2019-2024 C-PAC Developers +# Copyright (C) 2019-2025 C-PAC Developers # This file is part of C-PAC. @@ -21,6 +21,7 @@ from .utils import ( find_offending_time_points, generate_summarize_tissue_mask, + load_censor_tsv, NuisanceRegressor, temporal_variance_mask, ) @@ -30,6 +31,7 @@ "compcor", "find_offending_time_points", "generate_summarize_tissue_mask", + "load_censor_tsv", "NuisanceRegressor", "temporal_variance_mask", ] diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py index db6667dcb3..49ac12ac99 100644 --- a/CPAC/nuisance/utils/utils.py +++ b/CPAC/nuisance/utils/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2019-2024 C-PAC Developers +# Copyright (C) 2019-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -21,6 +21,7 @@ import re from typing import Optional +import numpy as np from nipype.interfaces import afni, ants, fsl import nipype.interfaces.utility as util from nipype.pipeline.engine import Workflow @@ -860,3 +861,19 @@ def encode(selector: dict) -> str: def __repr__(self) -> str: """Return a string representation of the nuisance regressor.""" return NuisanceRegressor.encode(self.selector) + + +def load_censor_tsv(filepath: str, expected_length: int) -> np.ndarray: + """Load censor TSV and verify length.""" + header = False + censor = np.empty((0)) + try: + censor = np.loadtxt(filepath) + except ValueError: + header = True + if header or censor.shape[0] == expected_length + 1: + censor = np.loadtxt(filepath, skiprows=1) + if censor.shape[0] == expected_length: + return censor + msg = f"Censor file length ({censor.shape[0]}) does not match expected length ({expected_length})." + raise ValueError(msg) From f9fb92475fca543e6f2fdc57231745ddf505da50 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 9 May 2025 13:54:48 -0400 Subject: [PATCH 283/507] :necktie: :wrench: Add 'organism' to pipeline config --- CPAC/pipeline/schema.py | 17 ++++++++++++++--- CPAC/registration/registration.py | 2 +- .../configs/pipeline_config_abcd-options.yml | 4 ++++ .../configs/pipeline_config_abcd-prep.yml | 4 ++++ .../configs/pipeline_config_benchmark-ANTS.yml | 6 +++--- .../configs/pipeline_config_benchmark-FNIRT.yml | 6 +++--- .../resources/configs/pipeline_config_blank.yml | 3 +++ .../configs/pipeline_config_ccs-options.yml | 6 +++--- .../pipeline_config_default-deprecated.yml | 6 +++--- .../configs/pipeline_config_default.yml | 3 +++ .../pipeline_config_fmriprep-ingress.yml | 6 +++--- .../pipeline_config_fmriprep-options.yml | 4 ++++ .../configs/pipeline_config_fx-options.yml | 7 ------- .../configs/pipeline_config_monkey-ABCD.yml | 9 ++++++--- .../configs/pipeline_config_monkey.yml | 9 ++++++--- CPAC/resources/configs/pipeline_config_ndmg.yml | 6 +++--- .../configs/pipeline_config_rbc-options.yml | 8 -------- .../configs/pipeline_config_regtest-1.yml | 6 +++--- .../configs/pipeline_config_regtest-2.yml | 6 +++--- .../configs/pipeline_config_regtest-3.yml | 6 +++--- .../configs/pipeline_config_regtest-4.yml | 6 +++--- .../configs/pipeline_config_rodent.yml | 9 ++++++--- 22 files changed, 81 insertions(+), 58 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 828c0b1aec..940086c153 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -21,6 +21,7 @@ from itertools import chain, permutations import re from subprocess import CalledProcessError +from typing import Literal, TypeAlias import numpy as np from pathvalidate import sanitize_filename @@ -61,6 +62,12 @@ RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" Number = Any(float, int, All(str, Match(SCIENTIFIC_NOTATION_STR_REGEX))) +Organism: TypeAlias = Literal[ + "human", + "non-human primate", + "rodent", +] +ORGANISMS: list[Organism] = ["human", "non-human primate", "rodent"] def str_to_bool1_1(x): # pylint: disable=invalid-name @@ -423,6 +430,7 @@ def sanitize(filename): "skip env check": Maybe(bool), # flag for skipping an environment check "pipeline_setup": { "pipeline_name": All(str, Length(min=1), sanitize), + "organism": In(ORGANISMS), "desired_orientation": In( {"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"} ), @@ -1411,10 +1419,13 @@ def schema(config_dict): "anatomical_registration" ]["registration"]["using"] ): - raise ExclusiveInvalid( - "[!] 
Overwrite transform method is the same as the anatomical registration method! " - "No need to overwrite transform with the same registration method. Please turn it off or use a different registration method." + msg = ( + "[!] Overwrite transform method is the same as the anatomical " + "registration method! No need to overwrite transform with the same " + "registration method. Please turn it off or use a different " + "registration method." ) + raise ExclusiveInvalid(msg) except KeyError: pass try: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 5b5d7493a9..a7c1301d1d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1552,7 +1552,7 @@ def FSL_registration_connector( ), } - if opt == "FSL": + if opt == "FSL" and cfg["pipeline_setup", "organism"] == "non-human primate": fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( f"anat_mni_fnirt_register{symm}" ) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index cd5c14ad42..11891240a5 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -14,9 +14,13 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac_abcd-options output_directory: + + # Quality control outputs quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + system_config: # The maximum amount of memory each participant's workflow can allocate. diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 22ec01b021..c8882984f8 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -14,9 +14,13 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac_abcd-prep output_directory: + + # Quality control outputs quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + system_config: # The maximum amount of memory each participant's workflow can allocate. diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index 0bb3fd15ac..b42c30f547 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. 
write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index 04bc116581..7026e1f2fd 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 66f34fe41c..7a792ca710 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -12,6 +12,9 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template + # Human, non-human primate, or rodent data? + organism: human + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" desired_orientation: RPI output_directory: diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 891a800837..1479db8395 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index f774f8e479..22c0e5dada 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + anatomical_preproc: run: On acpc_alignment: diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 5c22d2ee86..ccb074cfc0 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -13,6 +13,9 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac-default-pipeline + # Human, non-human primate, or rodent data? 
+ organism: human + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" desired_orientation: RPI diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 1f6cf5e1ef..275a7d8d1f 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + outdir_ingress: run: On Template: MNI152NLin2009cAsym diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 842f371257..c156cfc878 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -14,9 +14,13 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac_fmriprep-options output_directory: + + # Quality control outputs quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + system_config: # Select Off if you intend to run CPAC on a single machine. diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index e65fd62483..f8cc2f8de6 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -13,13 +13,6 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_fx-options - output_directory: - - # Quality control outputs - quality_control: - - # Generate eXtensible Connectivity Pipeline-style quality control files - generate_xcpqc_files: On nuisance_corrections: 2-nuisance_regression: diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index 9289e85966..8c283f79d6 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -13,17 +13,20 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_default_monkey_skullstrip + + # Human, non-human primate, or rodent data? + organism: non-human primate output_directory: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + system_config: # Select Off if you intend to run CPAC on a single machine. 
diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 4caef0c006..906b2b3611 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -13,17 +13,20 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac_default_monkey_skullstrip + + # Human, non-human primate, or rodent data? + organism: non-human primate output_directory: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + system_config: # Select Off if you intend to run CPAC on a single machine. diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index 02cd19b673..bd77602300 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + system_config: # The number of cores to allocate to ANTS-based anatomical registration per participant. diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index 0332d1a975..27fae8a7ac 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -13,14 +13,6 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: RBCv0 - output_directory: - - # Quality control outputs - quality_control: - - # Generate eXtensible Connectivity Pipeline-style quality control files - generate_xcpqc_files: On - system_config: # Stop worklow execution on first crash? diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index 7e61db6b8c..ee1ff5d5cf 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. 
write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index 0ba3b198aa..e5b6dbd626 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index d9a2cd679e..1c97b3416b 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index b33af48a33..5bc94822be 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -18,12 +18,12 @@ pipeline_setup: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + # Include extra versions and intermediate steps of functional preprocessing in the output directory. write_func_outputs: On diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index 95bc06b9b8..a72126f4a7 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -13,17 +13,20 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: analysis + + # Human, non-human primate, or rodent data? + organism: rodent output_directory: # Quality control outputs quality_control: - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: On - # Generate eXtensible Connectivity Pipeline-style quality control files generate_xcpqc_files: On + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: On + system_config: # The maximum amount of memory each participant's workflow can allocate. 
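[Editor's note, not part of the patch: a minimal sketch of the validation the
new `organism` key gets from the `In(ORGANISMS)` rule added to
CPAC/pipeline/schema.py above, assuming only stock voluptuous; the error
message shown is approximate.]

    from voluptuous import In, MultipleInvalid, Schema

    ORGANISMS = ["human", "non-human primate", "rodent"]
    pipeline_setup = Schema({"organism": In(ORGANISMS)})

    pipeline_setup({"organism": "rodent"})  # validates
    try:
        pipeline_setup({"organism": "macaque"})  # not a permitted value
    except MultipleInvalid as err:
        print(err)  # e.g. "value must be one of [...] @ data['organism']"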
From aa1729178865da8281734cb4ccfb8443e9710706 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 9 May 2025 15:20:14 -0400 Subject: [PATCH 284/507] :necktie: Restore nhp FNIRT logic Co-authored-by: Elizabeth Kenneally <113037677+e-kenneally@users.noreply.github.com> --- CPAC/registration/registration.py | 183 ++++++++++++++++-------------- 1 file changed, 99 insertions(+), 84 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a7c1301d1d..6b3a719394 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -17,7 +17,7 @@ # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order """Workflows for registration.""" -from typing import Optional +from typing import Literal, Optional from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util @@ -40,6 +40,7 @@ seperate_warps_list, single_ants_xfm_to_list, ) +from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool @@ -1454,9 +1455,15 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector( - wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w" -): + wf_name: str, + cfg: Configuration, + orig: str = "T1w", + opt: Literal["FSL", "FSL-linear"] = "FSL", + symmetric: bool = False, + template: str = "T1w", +) -> tuple[pe.Workflow, dict[str, tuple]]: """Transform raw data to template with FSL.""" + assert opt in ["FSL", "FSL-linear"] wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -1485,78 +1492,71 @@ def FSL_registration_connector( tmpl = "" if template == "EPI": tmpl = "EPI" + flirt_reg_anat_mni = create_fsl_flirt_linear_reg(f"anat_mni_flirt_register{symm}") - if opt in ("FSL", "FSL-linear"): - flirt_reg_anat_mni = create_fsl_flirt_linear_reg( - f"anat_mni_flirt_register{symm}" - ) + # Input registration parameters + wf.connect(inputNode, "interpolation", flirt_reg_anat_mni, "inputspec.interp") - # Input registration parameters - wf.connect(inputNode, "interpolation", flirt_reg_anat_mni, "inputspec.interp") + wf.connect(inputNode, "input_brain", flirt_reg_anat_mni, "inputspec.input_brain") - wf.connect( - inputNode, "input_brain", flirt_reg_anat_mni, "inputspec.input_brain" - ) + wf.connect( + inputNode, + "reference_brain", + flirt_reg_anat_mni, + "inputspec.reference_brain", + ) - wf.connect( - inputNode, - "reference_brain", - flirt_reg_anat_mni, - "inputspec.reference_brain", - ) + write_lin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_lin-warp_to_nii{symm}" + ) - write_lin_composite_xfm = pe.Node( - interface=fsl.ConvertWarp(), name=f"fsl_lin-warp_to_nii{symm}" - ) + wf.connect(inputNode, "reference_brain", write_lin_composite_xfm, "reference") - wf.connect(inputNode, "reference_brain", write_lin_composite_xfm, "reference") + wf.connect( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + write_lin_composite_xfm, + "premat", + ) - wf.connect( - flirt_reg_anat_mni, - "outputspec.linear_xfm", - write_lin_composite_xfm, - "premat", - ) + write_invlin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_nii{symm}" + ) - write_invlin_composite_xfm = pe.Node( - interface=fsl.ConvertWarp(), 
name=f"fsl_invlin-warp_to_nii{symm}" - ) + wf.connect(inputNode, "reference_brain", write_invlin_composite_xfm, "reference") - wf.connect( - inputNode, "reference_brain", write_invlin_composite_xfm, "reference" - ) + wf.connect( + flirt_reg_anat_mni, + "outputspec.invlinear_xfm", + write_invlin_composite_xfm, + "premat", + ) - wf.connect( + outputs = { + f"space-{sym}template_desc-preproc_{orig}": ( flirt_reg_anat_mni, - "outputspec.invlinear_xfm", + "outputspec.output_brain", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + write_lin_composite_xfm, + "out_file", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( write_invlin_composite_xfm, - "premat", - ) - - outputs = { - f"space-{sym}template_desc-preproc_{orig}": ( - flirt_reg_anat_mni, - "outputspec.output_brain", - ), - f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( - write_lin_composite_xfm, - "out_file", - ), - f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( - write_invlin_composite_xfm, - "out_file", - ), - f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( - write_lin_composite_xfm, - "out_file", - ), - } - - if opt == "FSL" and cfg["pipeline_setup", "organism"] == "non-human primate": - fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( - f"anat_mni_fnirt_register{symm}" - ) + "out_file", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + write_lin_composite_xfm, + "out_file", + ), + } + if opt == "FSL": + fnirt_reg_anat_mni = ( + create_fsl_fnirt_nonlinear_reg_nhp + if cfg["pipeline_setup", "organism"] == "non-human primate" + else create_fsl_fnirt_nonlinear_reg + )(f"anat_mni_fnirt_register{symm}") wf.connect( inputNode, "input_brain", fnirt_reg_anat_mni, "inputspec.input_brain" ) @@ -1597,30 +1597,35 @@ def FSL_registration_connector( fnirt_reg_anat_mni, "outputspec.output_brain", ), - f"space-{sym}template_desc-head_{orig}": ( - fnirt_reg_anat_mni, - "outputspec.output_head", - ), - f"space-{sym}template_desc-{'brain' if orig == 'T1w' else orig}_mask": ( - fnirt_reg_anat_mni, - "outputspec.output_mask", - ), - f"space-{sym}template_desc-T1wT2w_biasfield": ( - fnirt_reg_anat_mni, - "outputspec.output_biasfield", - ), f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( fnirt_reg_anat_mni, "outputspec.nonlinear_xfm", ), - f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": ( - fnirt_reg_anat_mni, - "outputspec.nonlinear_warp", - ), } + if cfg["pipeline_setup", "organism"] == "non-human primate": + added_outputs.update( + { + f"space-{sym}template_desc-head_{orig}": ( + fnirt_reg_anat_mni, + "outputspec.output_head", + ), + f"space-{sym}template_desc-{'brain' if orig == 'T1w' else orig}_mask": ( + fnirt_reg_anat_mni, + "outputspec.output_mask", + ), + f"space-{sym}template_desc-T1wT2w_biasfield": ( + fnirt_reg_anat_mni, + "outputspec.output_biasfield", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": ( + fnirt_reg_anat_mni, + "outputspec.nonlinear_warp", + ), + } + ) outputs.update(added_outputs) - return (wf, outputs) + return wf, outputs def ANTs_registration_connector( @@ -2256,6 +2261,7 @@ def bold_to_T1template_xfm_connector( ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Register T1w to template with FSL.""" + assert opt in ["FSL", "FSL-linear"] fsl, outputs = FSL_registration_connector( f"register_{opt}_anat_to_template_{pipe_num}", cfg, orig="T1w", opt=opt ) @@ -2274,11 +2280,18 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, 
opt=None): node, out = connect wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data("T1w-brain-template") - wf.connect(node, out, fsl, "inputspec.reference_brain") + if cfg["pipeline_setup", "organism"] == "non-human primate": + node, out = strat_pool.get_data("FNIRT-T1w-brain-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, fsl, "inputspec.reference_head") + node, out = strat_pool.get_data("FNIRT-T1w-template") + wf.connect(node, out, fsl, "inputspec.reference_head") + else: + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") + + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl, "inputspec.reference_head") node, out = strat_pool.get_data( ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] @@ -2345,6 +2358,7 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Register T1w to symmetric template with FSL.""" + assert opt in ["FSL", "FSL-linear"] fsl, outputs = FSL_registration_connector( f"register_{opt}_anat_to_template_symmetric_{pipe_num}", cfg, @@ -2419,6 +2433,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Directly register the mean functional to an EPI template. No T1w involved.""" + assert opt in ["FSL", "FSL-linear"] fsl, outputs = FSL_registration_connector( f"register_{opt}_EPI_to_template_{pipe_num}", cfg, From d7b2403fb7526cf24c913f7b9e408c9b0a85d51e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sun, 11 May 2025 15:24:11 -0400 Subject: [PATCH 285/507] :package: Link `libcrypt.so.2` for `csh` [rebuild standard] --- .github/Dockerfiles/base-lite.Dockerfile | 38 ++++++++++---------- .github/Dockerfiles/base-standard.Dockerfile | 11 +++--- 2 files changed, 25 insertions(+), 24 deletions(-) diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index e5c85d258a..82d550b220 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -1,4 +1,4 @@ -# Copyright (C) 2023 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,12 +14,12 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy as AFNI -FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy as ANTs -FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy as c3d -FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy as connectome-workbench -FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL -FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy as ICA-AROMA +FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy AS afni +FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy AS ants +FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy AS c3d +FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy AS connectome-workbench +FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy AS fsl +FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy AS ica-aroma FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ @@ -47,11 +47,11 @@ ENV FSLTCLSH=$FSLDIR/bin/fsltclsh \ PATH=${FSLDIR}/bin:$PATH \ TZ=America/New_York \ USER=c-pac_user -COPY --from=FSL /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu -COPY --from=FSL /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu -COPY --from=FSL /usr/bin /usr/bin -COPY --from=FSL /usr/local/bin /usr/local/bin -COPY --from=FSL /usr/share/fsl /usr/share/fsl +COPY --from=fsl /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu +COPY --from=fsl /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu +COPY --from=fsl /usr/bin /usr/bin +COPY --from=fsl /usr/local/bin /usr/local/bin +COPY --from=fsl /usr/share/fsl /usr/share/fsl # Installing C-PAC dependencies COPY requirements.txt /opt/requirements.txt @@ -67,10 +67,10 @@ ENV C3DPATH /opt/c3d ENV PATH $C3DPATH/bin:$PATH # Installing AFNI -COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib64/ld* /lib64/ -COPY --from=AFNI /opt/afni/ /opt/afni/ +COPY --from=afni /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib64/ld* /lib64/ +COPY --from=afni /opt/afni/ /opt/afni/ # set up AFNI ENV PATH=/opt/afni:$PATH @@ -79,11 +79,11 @@ ENV LANG="en_US.UTF-8" \ LC_ALL="en_US.UTF-8" \ ANTSPATH=/usr/lib/ants/bin \ PATH=/usr/lib/ants/bin:$PATH -COPY --from=ANTs /usr/lib/ants/ /usr/lib/ants/ -COPY --from=ANTs /ants_template/ /ants_template/ +COPY --from=ants /usr/lib/ants/ /usr/lib/ants/ +COPY --from=ants /ants_template/ /ants_template/ # Installing ICA-AROMA -COPY --from=ICA-AROMA /opt/ICA-AROMA/ /opt/ICA-AROMA/ +COPY --from=ica-aroma /opt/ICA-AROMA/ /opt/ICA-AROMA/ ENV PATH=/opt/ICA-AROMA:$PATH # link libraries & clean up diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 0ba2cd5158..07f7884d7d 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2023 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy as FreeSurfer +FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ @@ -37,15 +37,16 @@ ENV PATH="$FREESURFER_HOME/bin:$PATH" \ SUBJECTS_DIR="$FREESURFER_HOME/subjects" \ MNI_DIR="$FREESURFER_HOME/mni" ENV MINC_BIN_DIR="$MNI_DIR/bin" \ - MINC_LIB_DIR="$MNI_DIR/lib" \ - PATH="$PATH:$MINC_BIN_DIR" -COPY --from=FreeSurfer /usr/lib/freesurfer/ /usr/lib/freesurfer/ + MINC_LIB_DIR="$MNI_DIR/lib" +ENV PATH="$PATH:$MINC_BIN_DIR" +COPY --from=freesurfer /usr/lib/freesurfer/ /usr/lib/freesurfer/ COPY dev/docker_data/license.txt $FREESURFER_HOME/license.txt # link libraries & clean up RUN apt-get autoremove -y \ && apt-get autoclean -y \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ + && ln -s /usr/lib/x86_64-linux-gnu/libcrypt.so.1 /usr/lib/x86_64-linux-gnu/libcrypt.so.2 \ && find / -type f -print0 | sort -t/ -k2 | xargs -0 rdfind -makehardlinks true \ && rm -rf results.txt \ && ldconfig \ From 81d0b6e8af8cc11273bda7bb729c61570b85356c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 12 May 2025 12:10:22 -0400 Subject: [PATCH 286/507] =?UTF-8?q?fixup!=20=F0=9F=90=9B=20Fix=20`TypeErro?= =?UTF-8?q?r:=20unsupported=20operand=20type(s)=20for=20-:=20'NoneType'=20?= =?UTF-8?q?and=20'NoneType'`=20(#2229)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/utils/monitoring/draw_gantt_chart.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index 67f26693f9..bf6758e524 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -23,7 +23,7 @@ # Prior to release 0.12, Nipype was licensed under a BSD license. -# Modifications Copyright (C) 2021-2023 C-PAC Developers +# Modifications Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. @@ -407,7 +407,11 @@ def generate_gantt_chart( # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] - duration = (last_node["finish"] - start_node["start"]).total_seconds() + try: + duration = (last_node["finish"] - start_node["start"]).total_seconds() + except TypeError: + # no duration + return # Get events based dictionary of node run stats events = create_event_dict(start_node["start"], nodes_list) From 6b7056db040c5dcd8ee81f884d4ad9ae3540aa32 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 13 May 2025 14:58:02 -0400 Subject: [PATCH 287/507] :coffin: Remove support for AFNI ECM < v21.1.1 --- CHANGELOG.md | 1 + CPAC/network_centrality/network_centrality.py | 53 ++++++++----------- CPAC/utils/interfaces/afni.py | 6 +-- 3 files changed, 24 insertions(+), 36 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 078114399e..69e8a2543d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -71,6 +71,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Inputs `[desc-motion_bold, bold]` from `coregistration_prep_vol` nodeblock. - `input` field from `coregistration` in blank and default config. - `reg_with_skull` swtich from `func_input_prep` in blank and default config. +- Support for AFNI 3dECM < v21.1.1. 
#### Removed CI dependency diff --git a/CPAC/network_centrality/network_centrality.py b/CPAC/network_centrality/network_centrality.py index 21230b7385..ed919da7e5 100644 --- a/CPAC/network_centrality/network_centrality.py +++ b/CPAC/network_centrality/network_centrality.py @@ -1,4 +1,4 @@ -# Copyright (C) 2015-2024 C-PAC Developers +# Copyright (C) 2015-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Network centrality.""" + from pathlib import Path from typing import Optional @@ -23,7 +25,7 @@ from CPAC.network_centrality.utils import ThresholdOptionError from CPAC.pipeline.schema import valid_options from CPAC.utils.docs import docstring_parameter -from CPAC.utils.interfaces.afni import AFNI_GTE_21_1_1, ECM +from CPAC.utils.interfaces.afni import ECM @docstring_parameter( @@ -107,8 +109,6 @@ def create_centrality_wf( method_option, threshold_option = utils.check_centrality_params( method_option, threshold_option, test_thresh ) - # Eigenvector centrality and AFNI ≥ 21.1.1? - ecm_gte_21_1_01 = (method_option == "eigenvector_centrality") and AFNI_GTE_21_1_1 out_names = tuple(f"{method_option}_{x}" for x in weight_options) if base_dir is None: centrality_wf = pe.Workflow(name=wf_name) @@ -135,32 +135,21 @@ def create_centrality_wf( # Eigenvector centrality elif method_option == "eigenvector_centrality": - if ecm_gte_21_1_01: - afni_centrality_node = pe.MapNode( - ECM(environ={"OMP_NUM_THREADS": str(num_threads)}), - name="afni_centrality", - mem_gb=memory_gb, - iterfield=["do_binary", "out_file"], - ) - afni_centrality_node.inputs.out_file = [ - f"eigenvector_centrality_{w_option}.nii.gz" - for w_option in weight_options - ] - afni_centrality_node.inputs.do_binary = [ - w_option == "Binarized" for w_option in weight_options - ] - centrality_wf.connect( - afni_centrality_node, "out_file", output_node, "outfile_list" - ) - else: - afni_centrality_node = pe.Node( - ECM(environ={"OMP_NUM_THREADS": str(num_threads)}), - name="afni_centrality", - mem_gb=memory_gb, - ) - afni_centrality_node.inputs.out_file = ( - "eigenvector_centrality_merged.nii.gz" - ) + afni_centrality_node = pe.MapNode( + ECM(environ={"OMP_NUM_THREADS": str(num_threads)}), + name="afni_centrality", + mem_gb=memory_gb, + iterfield=["do_binary", "out_file"], + ) + afni_centrality_node.inputs.out_file = [ + f"eigenvector_centrality_{w_option}.nii.gz" for w_option in weight_options + ] + afni_centrality_node.inputs.do_binary = [ + w_option == "Binarized" for w_option in weight_options + ] + centrality_wf.connect( + afni_centrality_node, "out_file", output_node, "outfile_list" + ) afni_centrality_node.inputs.memory = memory_gb # 3dECM input only # lFCD @@ -172,8 +161,8 @@ def create_centrality_wf( ) afni_centrality_node.inputs.out_file = "lfcd_merged.nii.gz" - if not ecm_gte_21_1_01: - # Need to separate sub-briks except for 3dECM if AFNI > 21.1.01 + if method_option != "eigenvector_centrality": + # Need to separate sub-briks except for 3dECM sep_subbriks_node = pe.Node( Function( input_names=["nifti_file", "out_names"], diff --git a/CPAC/utils/interfaces/afni.py b/CPAC/utils/interfaces/afni.py index af7f4e56b2..20f2c8ae57 100644 --- a/CPAC/utils/interfaces/afni.py +++ b/CPAC/utils/interfaces/afni.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. 
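[Editor's note, not part of the patch: the network_centrality.py hunk above
rewrites 3dECM as a MapNode over paired `do_binary`/`out_file` inputs, and the
hunk below drops the now-unneeded version gate. A toy sketch of how nipype
pairs iterfields, assuming only stock nipype; the node name and filenames are
illustrative.]

    from nipype.interfaces.utility import Function
    from nipype.pipeline.engine import MapNode

    def label(do_binary, out_file):
        return f"{out_file} (binarized={do_binary})"

    node = MapNode(
        Function(
            input_names=["do_binary", "out_file"],
            output_names=["msg"],
            function=label,
        ),
        iterfield=["do_binary", "out_file"],
        name="pairing_demo",
    )
    node.inputs.do_binary = [True, False]
    node.inputs.out_file = [
        "eigenvector_centrality_Binarized.nii.gz",
        "eigenvector_centrality_Weighted.nii.gz",
    ]
    result = node.run()  # label() runs once per paired element
    print(result.outputs.msg)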
@@ -31,8 +31,6 @@ _major, _minor, _patch = [int(part) for part in AFNI_SEMVER.split(".")] AFNI_SEMVER = str(semver.Version.parse(f"{_major}.{_minor}.{_patch}")) del _major, _minor, _patch -AFNI_GTE_21_1_1 = semver.compare(AFNI_SEMVER, "21.1.1") >= 0 -"""AFNI version >= 21.1.1?""" class ECMInputSpec(_ECMInputSpec): @@ -51,4 +49,4 @@ class ECM(_ECM): input_spec = ECMInputSpec -__all__ = ["AFNI_GTE_21_1_1", "ECM"] +__all__ = ["ECM"] From eca8d25f8da5162e6947df307e517fd0d48f104c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 14 May 2025 12:32:24 -0400 Subject: [PATCH 288/507] =?UTF-8?q?:construction=5Fworker:=20:arrow=5Fup:?= =?UTF-8?q?=20Upgrade=20`circleci/continuation`@1.1=E2=86=900.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6e0e5b7802..4faeb95677 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,7 +1,7 @@ setup: true version: 2.1 orbs: - continuation: circleci/continuation@0.3.1 + continuation: circleci/continuation@1.1.0 parameters: run_tests: From ddb11afedc37b45bf5f739cf46b923d58fb06e57 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 14 May 2025 12:33:52 -0400 Subject: [PATCH 289/507] =?UTF-8?q?:construction=5Fworker:=20:arrow=5Fup:?= =?UTF-8?q?=20Upgrade=20Ubuntu@24.04=E2=86=9020.04?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .circleci/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index e202325582..3f408df584 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -172,7 +172,7 @@ commands: jobs: combine-coverage: machine: - image: ubuntu-2004:2023.04.2 + image: ubuntu-2404:2024.11.1 steps: - checkout - restore_cache: From dac91c21f0a3af6d1a2e4cbce4c5e84b4c9e79ca Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 14 May 2025 16:32:43 -0400 Subject: [PATCH 290/507] :construction_worker: :arrow_down: :pushpin: Pin Codecov orb to 5.3.0 --- .circleci/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index 3f408df584..5a07fda77a 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -3,7 +3,7 @@ version: 2.1 # Singularity started failing to set up on Circle circa May 2023, so those tests are currently disabled orbs: - codecov: codecov/codecov@5 + codecov: codecov/codecov@5.3.0 parameters: branch: From e30d68a6eb02010731962941e6b0a8e788508516 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sun, 18 May 2025 13:40:49 -0400 Subject: [PATCH 291/507] :bug: Link inputs for `mask_sbref` --- CPAC/registration/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 6b3a719394..fa2c6e7457 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3110,7 +3110,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "mask_sbref", ], ], - inputs=["sbref", "space-bold_desc-brain_mask"], + inputs=[("sbref", "space-bold_desc-brain_mask")], outputs=["sbref"], ) def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None): From 17e68b757c0c848128cde2e977421c5b1a87c22d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 19 May 2025 15:16:07 -0400 Subject: [PATCH 292/507] =?UTF-8?q?fixup!=20fixup!=20=F0=9F=90=9B=20Fix=20?= 
=?UTF-8?q?`TypeError:=20unsupported=20operand=20type(s)=20for=20-:=20'Non?=
 =?UTF-8?q?eType'=20and=20'NoneType'`=20(#2229)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 CPAC/utils/monitoring/draw_gantt_chart.py |  6 +-
 CPAC/utils/monitoring/monitoring.py       | 72 ++++++++++++++++++++++-
 CPAC/utils/tests/test_utils.py            | 13 ++++
 3 files changed, 86 insertions(+), 5 deletions(-)

diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py
index bf6758e524..25aaf15b61 100644
--- a/CPAC/utils/monitoring/draw_gantt_chart.py
+++ b/CPAC/utils/monitoring/draw_gantt_chart.py
@@ -52,6 +52,8 @@
 
 from nipype.utils.draw_gantt_chart import draw_lines, draw_resource_bar, log_to_dict
 
+from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone
+
 
 def create_event_dict(start_time, nodes_list):
     """
@@ -660,12 +662,12 @@ def _timing_timestamp(node):
         msg = "No logged nodes have timing information."
         raise ProcessLookupError(msg)
     return {
-        k: (
+        k: DatetimeWithSafeNone(
             datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
             if "." in v
             else datetime.fromisoformat(v)
         )
         if (k in {"start", "finish"} and isinstance(v, str))
-        else v
+        else DatetimeWithSafeNone(v)
         for k, v in node.items()
     }
diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py
index 950f419b95..a461868f47 100644
--- a/CPAC/utils/monitoring/monitoring.py
+++ b/CPAC/utils/monitoring/monitoring.py
@@ -1,9 +1,11 @@
+from datetime import datetime, timedelta
 import glob
 import json
 import math
 import os
 import socketserver
 import threading
+from typing import Optional
 
 import networkx as nx
 from traits.trait_base import Undefined
@@ -13,6 +15,68 @@
 from .custom_logging import getLogger
 
 
+def _safe_none_diff(
+    self: "DatetimeWithSafeNone | _NoTime", other: "DatetimeWithSafeNone | _NoTime"
+) -> datetime | timedelta:
+    """Subtract a datetime, timedelta or None from a datetime or None."""
+    # DatetimeWithSafeNone inherits from _NoTime, so an isinstance check
+    # against _NoTime would match real timestamps too; compare identity
+    # with the NoTime singleton instead to detect missing timestamps.
+    if self is NoTime or other is NoTime:
+        return timedelta(0)
+    if isinstance(other, (datetime, timedelta)):
+        # call the base implementation so that __sub__ does not recurse
+        return datetime.__sub__(self, other)
+    msg = f"Cannot subtract {type(other)} from {type(self)}"
+    raise NotImplementedError(msg)
+
+
+class _NoTime:
+    """A wrapper for None values that can be used in place of a datetime object."""
+
+    def __bool__(self) -> bool:
+        """Return False for _NoTime."""
+        return False
+
+    def __int__(self) -> int:
+        """Return 0 for _NoTime."""
+        return 0
+
+    def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta:
+        """Subtract between None and a datetime or timedelta or None."""
+        return _safe_none_diff(self, other)
+
+
+NoTime = _NoTime()
+"""A singleton None that can be used in place of a datetime object."""
+
+
+class DatetimeWithSafeNone(datetime, _NoTime):
+    """Time class that can be None or a time value."""
+
+    def __new__(cls, dt: Optional[datetime]) -> "DatetimeWithSafeNone | _NoTime":
+        """Create a new instance of the class."""
+        return (
+            NoTime
+            if dt is None
+            else datetime.__new__(
+                cls,
+                dt.year,
+                dt.month,
+                dt.day,
+                dt.hour,
+                dt.minute,
+                dt.second,
+                dt.microsecond,
+                dt.tzinfo,
+            )
+        )
+
+    def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta:
+        """Subtract between a datetime or timedelta or None."""
+        return _safe_none_diff(self, other)
+
+
 # Log initial information from all the nodes
 def recurse_nodes(workflow, prefix=""):
     for node in 
nx.topological_sort(workflow._graph):
@@ -111,8 +175,8 @@ def log_nodes_cb(node, status):
     status_dict = {
         "id": str(node),
         "hash": node.inputs.get_hashval()[1],
-        "start": getattr(runtime, "startTime", None),
-        "finish": getattr(runtime, "endTime", None),
+        "start": DatetimeWithSafeNone(getattr(runtime, "startTime", None)),
+        "finish": DatetimeWithSafeNone(getattr(runtime, "endTime", None)),
         "runtime_threads": runtime_threads,
         "runtime_memory_gb": getattr(runtime, "mem_peak_gb", "N/A"),
         "estimated_memory_gb": node.mem_gb,
@@ -122,7 +186,9 @@ def log_nodes_cb(node, status):
     if hasattr(node, "input_data_shape") and node.input_data_shape is not Undefined:
         status_dict["input_data_shape"] = node.input_data_shape
 
-    if status_dict["start"] is None or status_dict["finish"] is None:
+    if any(
+        not isinstance(status_dict[label], datetime) for label in ["start", "finish"]
+    ):
         status_dict["error"] = True
 
     logger.debug(json.dumps(status_dict))
diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py
index ab896c6029..b2317fda6d 100644
--- a/CPAC/utils/tests/test_utils.py
+++ b/CPAC/utils/tests/test_utils.py
@@ -1,5 +1,6 @@
 """Tests of CPAC utility functions."""
 
+from datetime import datetime, timedelta
 import multiprocessing
 from unittest import mock
 
@@ -10,6 +11,7 @@
 from CPAC.pipeline.nodeblock import NodeBlockFunction
 from CPAC.utils.configuration import Configuration
 from CPAC.utils.monitoring.custom_logging import log_subprocess
+from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone
 from CPAC.utils.tests import old_functions
 from CPAC.utils.utils import (
     check_config_resources,
@@ -168,3 +170,14 @@ def test_system_deps():
     Raises an exception if dependencies are not met.
     """
     check_system_deps(*([True] * 4))
+
+
+@pytest.mark.parametrize(["t1", "t2"], [(datetime.now(), None), (datetime.now(), None)])
+def test_datetime_with_safe_none(t1, t2):
+    """Test DatetimeWithSafeNone class works with datetime and None."""
+    t1 = DatetimeWithSafeNone(t1)
+    t2 = DatetimeWithSafeNone(t2)
+    if t1 and t2:
+        assert isinstance(t2 - t1, timedelta)
+    else:
+        assert t2 - t1 == timedelta(0)
From 59d67875d1759b027e15e6eac7a27dfbeb60d218 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 20 May 2025 10:05:21 -0400
Subject: [PATCH 293/507] :white_check_mark: Stack parametrize

---
 CPAC/utils/tests/test_utils.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py
index b2317fda6d..08f17c38e4 100644
--- a/CPAC/utils/tests/test_utils.py
+++ b/CPAC/utils/tests/test_utils.py
@@ -172,7 +172,8 @@ def test_system_deps():
     check_system_deps(*([True] * 4))
 
 
-@pytest.mark.parametrize(["t1", "t2"], [(datetime.now(), None), (datetime.now(), None)])
+@pytest.mark.parametrize("t1", [datetime.now(), None])
+@pytest.mark.parametrize("t2", [datetime.now(), None])
 def test_datetime_with_safe_none(t1, t2):
     """Test DatetimeWithSafeNone class works with datetime and None."""
     t1 = DatetimeWithSafeNone(t1)
From c80cc72b33053b1382fccb5b556e8179dac5249b Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 20 May 2025 11:26:50 -0400
Subject: [PATCH 294/507] :art: Define `__bool__` for `DatetimeWithSafeNone`

---
 CPAC/utils/monitoring/monitoring.py | 45 +++++++++++++++++++++++++++--
 1 file changed, 43 insertions(+), 2 deletions(-)

diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py
index a461868f47..015b5429a8 100644
--- a/CPAC/utils/monitoring/monitoring.py
+++ b/CPAC/utils/monitoring/monitoring.py
@@ -1,3 
+1,22 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order +"""Monitoring utilities for C-PAC.""" + from datetime import datetime, timedelta import glob import json @@ -42,6 +61,14 @@ def __int__(self) -> int: """Return 0 for _NoTime.""" return 0 + def __repr__(self) -> str: + """Return 'NoTime' for _NoTime.""" + return "NoTime" + + def __str__(self) -> str: + """Return 'NoTime' for _NoTime.""" + return "NoTime" + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: """Subtract between None and a datetime or timedelta or None.""" return _safe_none_diff(self, other) @@ -72,13 +99,27 @@ def __new__(cls, dt: Optional[datetime]) -> "DatetimeWithSafeNone | _NoTime": ) ) + def __bool__(self) -> bool: + """Return True if not NoTime.""" + return self is not NoTime + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: """Subtract between a datetime or timedelta or None.""" return _safe_none_diff(self, other) + def __repr__(self) -> str: + """Return the string representation of the datetime or NoTime.""" + if self: + return datetime.__repr__(self) + return "NoTime" + + def __str__(self) -> str: + """Return the string representation of the datetime or NoTime.""" + return super().__str__() + -# Log initial information from all the nodes def recurse_nodes(workflow, prefix=""): + """Log initial information from all the nodes.""" for node in nx.topological_sort(workflow._graph): if isinstance(node, pe.Workflow): for subnode in recurse_nodes(node, prefix + workflow.name + "."): @@ -221,7 +262,7 @@ def handle(self): with open(callback_file, "rb") as lf: for l in lf.readlines(): # noqa: E741 - l = l.strip() # noqa: E741 + l = l.strip() # noqa: E741,PLW2901 try: node = json.loads(l) if node["id"] not in tree[subject]: From 9ce335e1d726978a279e0fd0cee41a329e067395 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 20 May 2025 13:19:48 -0400 Subject: [PATCH 295/507] :recycle: Don't fork motion reference inputs --- CPAC/func_preproc/__init__.py | 7 +- CPAC/func_preproc/func_motion.py | 157 ++++++++++-------- .../tests/test_preproc_connections.py | 6 +- CPAC/pipeline/cpac_pipeline.py | 8 +- CPAC/utils/monitoring/monitoring.py | 1 - CPAC/utils/tests/test_utils.py | 21 ++- 6 files changed, 121 insertions(+), 79 deletions(-) diff --git a/CPAC/func_preproc/__init__.py b/CPAC/func_preproc/__init__.py index d10cdc55f0..06837c024a 100644 --- a/CPAC/func_preproc/__init__.py +++ b/CPAC/func_preproc/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -22,17 +22,20 @@ func_motion_correct_only, func_motion_estimates, get_motion_ref, + get_motion_ref_fmriprep, motion_estimate_filter, ) from .func_preproc import get_idx, slice_timing_wf +get_motion_refs = [get_motion_ref, get_motion_ref_fmriprep] + __all__ = [ "calc_motion_stats", "func_motion_correct", "func_motion_correct_only", "func_motion_estimates", "get_idx", - "get_motion_ref", + "get_motion_refs", "motion_estimate_filter", "slice_timing_wf", ] diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index bea7d2e29c..26395bd47b 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -16,7 +16,8 @@ # License along with C-PAC. If not, see . """Functions for calculating motion parameters.""" -# pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position +from typing import Literal + from nipype.interfaces import afni, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils @@ -31,8 +32,10 @@ motion_power_statistics, ) from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.engine import ResourcePool from CPAC.pipeline.nodeblock import nodeblock from CPAC.pipeline.schema import valid_options +from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces.function import Function from CPAC.utils.utils import check_prov_for_motion_tool @@ -364,79 +367,97 @@ def get_mcflirt_rms_abs(rms_files): "motion_correction", "motion_correction_reference", ], - option_val=["mean", "median", "selected_volume", "fmriprep_reference"], - inputs=["desc-preproc_bold", "desc-reorient_bold"], + option_val=["mean", "median", "selected_volume"], + inputs=["desc-preproc_bold"], outputs=["motion-basefile"], ) -def get_motion_ref(wf, cfg, strat_pool, pipe_num, opt=None): - if opt not in get_motion_ref.option_val: - msg = ( - "\n\n[!] 
Error: The 'motion_correction_reference' " - "parameter of the 'motion_correction' workflow " - "must be one of:\n\t{0}.\n\nTool input: '{1}'" - "\n\n".format( - " or ".join([f"'{val}'" for val in get_motion_ref.option_val]), opt +def get_motion_ref( + wf: pe.Workflow, + cfg: Configuration, + strat_pool: ResourcePool, + pipe_num: int, + opt: Literal["mean", "median", "selected_volume"], +) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: + """Get the reference image for motion correction.""" + node, out = strat_pool.get_data("desc-preproc_bold") + in_label = "in_file" + match opt: + case "mean": + func_get_RPI = pe.Node( + interface=afni_utils.TStat(options="-mean"), + name=f"func_get_mean_RPI_{pipe_num}", + mem_gb=0.48, + mem_x=(1435097126797993 / 302231454903657293676544, in_label), ) - ) - raise ValueError(msg) - - if opt == "mean": - func_get_RPI = pe.Node( - interface=afni_utils.TStat(), - name=f"func_get_mean_RPI_{pipe_num}", - mem_gb=0.48, - mem_x=(1435097126797993 / 302231454903657293676544, "in_file"), - ) - - func_get_RPI.inputs.options = "-mean" - func_get_RPI.inputs.outputtype = "NIFTI_GZ" - - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_get_RPI, "in_file") - - elif opt == "median": - func_get_RPI = pe.Node( - interface=afni_utils.TStat(), name=f"func_get_median_RPI_{pipe_num}" - ) - - func_get_RPI.inputs.options = "-median" - func_get_RPI.inputs.outputtype = "NIFTI_GZ" - - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_get_RPI, "in_file") - - elif opt == "selected_volume": - func_get_RPI = pe.Node( - interface=afni.Calc(), name=f"func_get_selected_RPI_{pipe_num}" - ) - - func_get_RPI.inputs.set( - expr="a", - single_idx=cfg.functional_preproc["motion_estimates_and_correction"][ - "motion_correction" - ]["motion_correction_reference_volume"], - outputtype="NIFTI_GZ", - ) + case "median": + func_get_RPI = pe.Node( + interface=afni_utils.TStat(options="-median"), + name=f"func_get_median_RPI_{pipe_num}", + ) + case "selected_volume": + func_get_RPI = pe.Node( + interface=afni.Calc( + expr="a", + single_idx=cfg.functional_preproc[ + "motion_estimates_and_correction" + ]["motion_correction"]["motion_correction_reference_volume"], + ), + name=f"func_get_selected_RPI_{pipe_num}", + ) + in_label = "in_file_a" + case _: + msg = ( + "\n\n[!] 
Error: The 'motion_correction_reference' " + "parameter of the 'motion_correction' workflow " + "must be one of:\n\t{0}.\n\nTool input: '{1}'" + "\n\n".format( + " or ".join([f"'{val}'" for val in get_motion_ref.option_val]), opt + ) + ) + raise ValueError(msg) + func_get_RPI.inputs.outputtype = "NIFTI_GZ" + wf.connect(node, out, func_get_RPI, in_label) + outputs = {"motion-basefile": (func_get_RPI, "out_file")} + return wf, outputs - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_get_RPI, "in_file_a") - elif opt == "fmriprep_reference": - func_get_RPI = pe.Node( - Function( - input_names=["in_file"], - output_names=["out_file"], - function=estimate_reference_image, - ), - name=f"func_get_fmriprep_ref_{pipe_num}", - ) +@nodeblock( + name="get_motion_ref_fmriprep", + switch=["functional_preproc", "motion_estimates_and_correction", "run"], + option_key=[ + "functional_preproc", + "motion_estimates_and_correction", + "motion_correction", + "motion_correction_reference", + ], + option_val=["fmriprep_reference"], + inputs=["desc-reorient_bold"], + outputs=["motion-basefile"], +) +def get_motion_ref_fmriprep( + wf: pe.Workflow, + cfg: Configuration, + strat_pool: ResourcePool, + pipe_num: int, + opt: Literal["fmriprep_reference"], +) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: + """Get the fMRIPrep-style reference image for motion correction.""" + assert opt == "fmriprep_reference" + func_get_RPI = pe.Node( + Function( + input_names=["in_file"], + output_names=["out_file"], + function=estimate_reference_image, + ), + name=f"func_get_fmriprep_ref_{pipe_num}", + ) - node, out = strat_pool.get_data("desc-reorient_bold") - wf.connect(node, out, func_get_RPI, "in_file") + node, out = strat_pool.get_data("desc-reorient_bold") + wf.connect(node, out, func_get_RPI, "in_file") outputs = {"motion-basefile": (func_get_RPI, "out_file")} - return (wf, outputs) + return wf, outputs def motion_correct_3dvolreg(wf, cfg, strat_pool, pipe_num): @@ -728,7 +749,9 @@ def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num): } -def motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt): +def motion_correct_connections( + wf, cfg, strat_pool, pipe_num, opt +): # -> tuple[Any, dict[str, tuple[Node, str]]]: """Check opt for valid option, then connect that option.""" motion_correct_options = valid_options["motion_correction"] if opt not in motion_correct_options: diff --git a/CPAC/func_preproc/tests/test_preproc_connections.py b/CPAC/func_preproc/tests/test_preproc_connections.py index f58380a7fd..7db0caa3c9 100644 --- a/CPAC/func_preproc/tests/test_preproc_connections.py +++ b/CPAC/func_preproc/tests/test_preproc_connections.py @@ -31,7 +31,7 @@ func_motion_correct, func_motion_correct_only, func_motion_estimates, - get_motion_ref, + get_motion_refs, motion_estimate_filter, ) from CPAC.func_preproc.func_preproc import func_normalize @@ -219,13 +219,13 @@ def test_motion_filter_connections( "calculate_motion_first", ]: func_motion_blocks = [ - get_motion_ref, + *get_motion_refs, func_motion_estimates, motion_estimate_filter, ] else: func_motion_blocks = [ - get_motion_ref, + *get_motion_refs, func_motion_correct, motion_estimate_filter, ] diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 1b64b286a8..08a59d4a90 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. 
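A note on the split above: the fMRIPrep-style reference now lives in its own nodeblock with its own input (`desc-reorient_bold`), so, per this patch's intent, choosing it no longer forks strategies over an input the other options never use. A sketch of how the exported pair is consumed (imports as elsewhere in this patch series):

    from CPAC.func_preproc import get_motion_refs
    from CPAC.func_preproc.func_motion import (
        func_motion_correct,
        motion_estimate_filter,
    )

    # Only the nodeblock whose option_val matches the configured
    # motion_correction_reference runs; the other candidate is skipped.
    func_motion_blocks = [
        *get_motion_refs,  # unpacks both candidates where the single block used to go
        func_motion_correct,
        motion_estimate_filter,
    ]
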
@@ -87,7 +87,7 @@ func_motion_correct, func_motion_correct_only, func_motion_estimates, - get_motion_ref, + get_motion_refs, motion_estimate_filter, ) from CPAC.func_preproc.func_preproc import ( @@ -1318,7 +1318,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): "motion_estimates" ]["calculate_motion_first"]: func_motion_blocks = [ - get_motion_ref, + *get_motion_refs, func_motion_estimates, motion_estimate_filter, ] @@ -1332,7 +1332,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): ) else: func_motion_blocks = [ - get_motion_ref, + *get_motion_refs, func_motion_correct, motion_estimate_filter, ] diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 015b5429a8..5cea28fac6 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order """Monitoring utilities for C-PAC.""" from datetime import datetime, timedelta diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 08f17c38e4..5a7abf5015 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -1,3 +1,19 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Tests of CPAC utility functions.""" from datetime import datetime, timedelta @@ -7,7 +23,7 @@ from _pytest.logging import LogCaptureFixture import pytest -from CPAC.func_preproc import get_motion_ref +from CPAC.func_preproc import get_motion_refs from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.utils.configuration import Configuration from CPAC.utils.monitoring.custom_logging import log_subprocess @@ -153,7 +169,8 @@ def test_executable(executable): _installation_check(executable, "-help") -def test_NodeBlock_option_SSOT(): # pylint: disable=invalid-name +@pytest.mark.parametrize("get_motion_ref", get_motion_refs) +def test_NodeBlock_option_SSOT(get_motion_ref: NodeBlockFunction): # pylint: disable=invalid-name """Test using NodeBlock dictionaries for SSOT for options.""" assert isinstance(get_motion_ref, NodeBlockFunction) with pytest.raises(ValueError) as value_error: From 36fed9df34c17adda0865af52c8eb7f2d828b3e6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 20 May 2025 14:49:24 -0400 Subject: [PATCH 296/507] :bug: Fix circular import --- CPAC/func_preproc/func_motion.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 26395bd47b..51424ab601 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
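The func_motion.py hunks that follow break a circular import by deferring the `ResourcePool` import to type-checking time. A minimal sketch of the pattern (signature trimmed for illustration):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated by static type checkers only, never at runtime, so
        # CPAC.pipeline.engine may import this module back without a cycle.
        from CPAC.pipeline.engine import ResourcePool

    def get_motion_ref(strat_pool: "ResourcePool") -> None:
        """The quoted annotation is resolved lazily, not at import time."""
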
"""Functions for calculating motion parameters.""" -from typing import Literal +from typing import Literal, TYPE_CHECKING from nipype.interfaces import afni, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils @@ -32,13 +32,15 @@ motion_power_statistics, ) from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.engine import ResourcePool from CPAC.pipeline.nodeblock import nodeblock from CPAC.pipeline.schema import valid_options from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces.function import Function from CPAC.utils.utils import check_prov_for_motion_tool +if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool + @nodeblock( name="calc_motion_stats", @@ -374,7 +376,7 @@ def get_mcflirt_rms_abs(rms_files): def get_motion_ref( wf: pe.Workflow, cfg: Configuration, - strat_pool: ResourcePool, + strat_pool: "ResourcePool", pipe_num: int, opt: Literal["mean", "median", "selected_volume"], ) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: @@ -437,7 +439,7 @@ def get_motion_ref( def get_motion_ref_fmriprep( wf: pe.Workflow, cfg: Configuration, - strat_pool: ResourcePool, + strat_pool: "ResourcePool", pipe_num: int, opt: Literal["fmriprep_reference"], ) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: From 5b55f9c54f24bcd35243dd82797f664f666fc064 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 20 May 2025 20:40:31 -0400 Subject: [PATCH 297/507] :bug: Handle ISO-format datetime strings --- CPAC/utils/monitoring/monitoring.py | 25 ++++++++++++++++++------- CPAC/utils/tests/test_utils.py | 14 +++++++++----- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 5cea28fac6..380bb5961e 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -23,7 +23,7 @@ import os import socketserver import threading -from typing import Optional +from typing import Optional, TypeAlias import networkx as nx from traits.trait_base import Undefined @@ -80,12 +80,12 @@ def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedel class DatetimeWithSafeNone(datetime, _NoTime): """Time class that can be None or a time value.""" - def __new__(cls, dt: Optional[datetime]) -> "DatetimeWithSafeNone | _NoTime": + def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": """Create a new instance of the class.""" - return ( - NoTime - if dt is None - else datetime.__new__( + if dt is None: + return NoTime + if isinstance(dt, datetime): + return datetime.__new__( cls, dt.year, dt.month, @@ -96,7 +96,14 @@ def __new__(cls, dt: Optional[datetime]) -> "DatetimeWithSafeNone | _NoTime": dt.microsecond, dt.tzinfo, ) - ) + if isinstance(dt, str): + try: + return DatetimeWithSafeNone(datetime.fromisoformat(dt)) + except (ValueError, TypeError): + error = f"Invalid ISO-format datetime string: {dt}" + else: + error = f"Cannot convert {type(dt)} to datetime" + raise TypeError(error) def __bool__(self) -> bool: """Return True if not NoTime.""" @@ -117,6 +124,10 @@ def __str__(self) -> str: return super().__str__() +OptionalDatetime: TypeAlias = Optional[datetime | str | DatetimeWithSafeNone | _NoTime] +"""Type alias for a datetime, ISO-format string or None.""" + + def recurse_nodes(workflow, prefix=""): """Log initial information from all the nodes.""" for node in nx.topological_sort(workflow._graph): diff --git a/CPAC/utils/tests/test_utils.py 
b/CPAC/utils/tests/test_utils.py index 5a7abf5015..74e4a03993 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -27,7 +27,7 @@ from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.utils.configuration import Configuration from CPAC.utils.monitoring.custom_logging import log_subprocess -from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone +from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone, OptionalDatetime from CPAC.utils.tests import old_functions from CPAC.utils.utils import ( check_config_resources, @@ -189,13 +189,17 @@ def test_system_deps(): check_system_deps(*([True] * 4)) -@pytest.mark.parametrize("t1", [datetime.now(), None]) -@pytest.mark.parametrize("t2", [datetime.now(), None]) -def test_datetime_with_safe_none(t1, t2): +@pytest.mark.parametrize( + "t1", [datetime.now(), datetime.isoformat(datetime.now()), None] +) +@pytest.mark.parametrize( + "t2", [datetime.now(), datetime.isoformat(datetime.now()), None] +) +def test_datetime_with_safe_none(t1: OptionalDatetime, t2: OptionalDatetime): """Test DatetimeWithSafeNone class works with datetime and None.""" t1 = DatetimeWithSafeNone(t1) t2 = DatetimeWithSafeNone(t2) if t1 and t2: - assert isinstance(t2 - t1, datetime) + assert isinstance(t2 - t1, timedelta) else: assert t2 - t1 == timedelta(0) From 946bf28e81fa835909ea8ef0a44cca4ae2f54938 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 09:55:42 -0400 Subject: [PATCH 298/507] fixup! :recycle: Don't fork motion reference inputs --- CPAC/func_preproc/tests/test_preproc_connections.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/func_preproc/tests/test_preproc_connections.py b/CPAC/func_preproc/tests/test_preproc_connections.py index 7db0caa3c9..03a5b51dd6 100644 --- a/CPAC/func_preproc/tests/test_preproc_connections.py +++ b/CPAC/func_preproc/tests/test_preproc_connections.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023-2024 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. @@ -26,12 +26,12 @@ from nipype.interfaces.utility import Function as NipypeFunction from nipype.pipeline.engine import Workflow as NipypeWorkflow +from CPAC.func_preproc import get_motion_refs from CPAC.func_preproc.func_motion import ( calc_motion_stats, func_motion_correct, func_motion_correct_only, func_motion_estimates, - get_motion_refs, motion_estimate_filter, ) from CPAC.func_preproc.func_preproc import func_normalize From dfb8c8157a0b6e2ab6923ca7acfb762b0841866b Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 20 May 2025 15:02:59 -0400 Subject: [PATCH 299/507] :pencil2: Fix nibabel import in phase_encode function node --- CPAC/distortion_correction/distortion_correction.py | 6 ++++-- CPAC/distortion_correction/utils.py | 4 +++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index a7f0eaefcc..4457ab91fe 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright (C) 2017-2022 C-PAC Developers +# Copyright (C) 2017-2025 C-PAC Developers # This file is part of C-PAC. @@ -16,6 +16,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
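One detail worth spelling out for the `phase_encoding` change below: nipype `Function` nodes re-execute each string in their `imports` list inside the node's own namespace, so the alias has to match whatever the wrapped function body actually references, presumably `nib` here. A sketch with a hypothetical wrapped function:

    from CPAC.utils.interfaces.function import Function

    def _read_shape(in_file):
        # Runs in an isolated namespace: only the `imports` strings
        # below exist here, not the enclosing module's imports.
        return nib.load(in_file).shape  # needs "import nibabel as nib"

    phase_encoding_iface = Function(
        input_names=["in_file"],
        output_names=["shape"],
        function=_read_shape,
        imports=["import os", "import numpy as np", "import nibabel as nib"],
    )
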
+"""Distortion correction in C-PAC.""" + import os import subprocess @@ -663,7 +665,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "import os", "import subprocess", "import numpy as np", - "import nibabel", + "import nibabel as nib", "import sys", ] phase_encoding = pe.Node( diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index b76acba074..0635df5eb8 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2023 C-PAC Developers +# Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Distortion correction utilities.""" + import os import subprocess import sys From d9e9e8490655d03853c3fa3ae631ae1c94ee4981 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 19:24:24 +0000 Subject: [PATCH 300/507] :arrow_up: Bump setuptools in the pip group across 1 directory Bumps the pip group with 1 update in the / directory: [setuptools](https://github.com/pypa/setuptools). Updates `setuptools` from 68.0.0 to 78.1.1 - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst) - [Commits](https://github.com/pypa/setuptools/compare/v68.0.0...v78.1.1) --- updated-dependencies: - dependency-name: setuptools dependency-version: 78.1.1 dependency-type: direct:production dependency-group: pip ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index bb9f9b6c73..1242536fc0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -45,7 +45,7 @@ h5py==3.8.0 importlib-metadata==6.8.0 lxml==4.9.2 pip==23.3 -setuptools==70.0.0 +setuptools==78.1.1 urllib3==1.26.19 wheel==0.40.0 zipp==3.19.1 From 10fc465f790196da145ee172d7fbca98f4ab7cdd Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 10:52:02 -0400 Subject: [PATCH 301/507] :passport_control: Add token to Dependabot config --- .github/dependabot.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml index f80c6026e8..566c82469e 100644 --- a/.github/dependabot.yaml +++ b/.github/dependabot.yaml @@ -1,4 +1,10 @@ version: 2 +registries: + ghcr: + type: docker-registry + url: ghcr.io + username: ChildMindInstituteCNL + password: ${{ secrets.GHCR_REGISTRY_TOKEN }} updates: - package-ecosystem: "github-actions" directory: / @@ -8,6 +14,9 @@ updates: groups: all-actions: patterns: [ "*" ] + target-branch: develop + registries: + - ghcr - package-ecosystem: pip directory: / schedule: @@ -18,3 +27,5 @@ updates: development dependencies: dependency-type: development target-branch: develop + registries: + - ghcr From e59868bfc28ca93a17e1b5bf39b36ec673ad1dfe Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 13:48:14 -0400 Subject: [PATCH 302/507] =?UTF-8?q?:bug:=20"regressors"=20=E2=86=92=20"des?= =?UTF-8?q?c-confounds=5Ftimeseries"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/_entrypoints/run.py | 12 +----------- CPAC/func_preproc/func_motion.py | 4 ++-- CPAC/pipeline/engine.py | 4 +++- CPAC/qc/pipeline.py | 20 +++++++++++++++++++- CPAC/qc/xcp.py | 24 ++++++++++++++++++++---- 5 files changed, 45 insertions(+), 
19 deletions(-) diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index f84b6cf799..1a01489da8 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (C) 2018-2024 C-PAC Developers +# Copyright (C) 2018-2025 C-PAC Developers # This file is part of C-PAC. @@ -729,16 +729,6 @@ def run_main(): args.fail_fast ) - if c["pipeline_setup"]["output_directory"]["quality_control"][ - "generate_xcpqc_files" - ]: - c["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_first"] = True - c["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_after"] = True - if args.participant_label: WFLOGGER.info( "#### Running C-PAC for %s", ", ".join(args.participant_label) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index bea7d2e29c..d95b80c314 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -365,7 +365,7 @@ def get_mcflirt_rms_abs(rms_files): "motion_correction_reference", ], option_val=["mean", "median", "selected_volume", "fmriprep_reference"], - inputs=["desc-preproc_bold", "desc-reorient_bold"], + inputs=[("desc-preproc_bold", "desc-reorient_bold")], outputs=["motion-basefile"], ) def get_motion_ref(wf, cfg, strat_pool, pipe_num, opt=None): diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 11f5965e8e..2a1e13a8c8 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""C-PAC pipeline engine.""" + import ast import copy import hashlib @@ -307,7 +309,7 @@ def regressor_dct(self, cfg) -> dict: "ingress_regressors." ) _nr = cfg["nuisance_corrections", "2-nuisance_regression"] - if not hasattr(self, "timeseries"): + if not hasattr(self, "desc-confounds_timeseries"): if _nr["Regressors"]: self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} else: diff --git a/CPAC/qc/pipeline.py b/CPAC/qc/pipeline.py index 15d6b35e09..2eb44ed4f2 100644 --- a/CPAC/qc/pipeline.py +++ b/CPAC/qc/pipeline.py @@ -1,3 +1,21 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
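For context on the renames in this patch: the resource key "regressors" becomes "desc-confounds_timeseries", matching the BIDS-derivatives naming used for confounds tables (e.g., in fMRIPrep outputs). A hedged sketch of the lookup pattern, inside a nodeblock where `strat_pool` is in scope:

    if strat_pool.check_rpool("desc-confounds_timeseries"):
        node, out = strat_pool.get_data("desc-confounds_timeseries")
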
+"""C-PAC quality control pipeline.""" + import pkg_resources as p from CPAC.pipeline import nipype_pipeline_engine as pe @@ -156,7 +174,7 @@ def qc_brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): @nodeblock( - name="qc_brain_extraction", + name="qc_T1w_standard", config=["pipeline_setup", "output_directory", "quality_control"], switch=["generate_quality_control_images"], inputs=["space-template_desc-preproc_T1w", "T1w-brain-template"], diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index 95cb870430..4a313b4719 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -1,3 +1,19 @@ +# Copyright (C) 2021-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """ Generate XCP-stype quality control files. @@ -439,7 +455,7 @@ def get_entity_part(key): "space-bold_desc-brain_mask", ["T1w-brain-template-mask", "EPI-template-mask"], ["space-template_desc-bold_mask", "space-EPItemplate_desc-bold_mask"], - "regressors", + "desc-confounds_timeseries", ["T1w-brain-template-funcreg", "EPI-brain-template-funcreg"], [ "desc-movementParametersUnfiltered_motion", @@ -458,7 +474,7 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): # pylint: disable=invalid-name, unused-argument if cfg[ "nuisance_corrections", "2-nuisance_regression", "run" - ] and not strat_pool.check_rpool("regressors"): + ] and not strat_pool.check_rpool("desc-confounds_timeseries"): return wf, {} bids_info = pe.Node( Function( @@ -501,8 +517,8 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): ) qc_file.inputs.desc = "preproc" qc_file.inputs.regressors = ( - strat_pool.node_data("regressors") - .node.name.split("regressors_")[-1][::-1] + strat_pool.node_data("desc-confounds_timeseries") + .node.name.split("desc-confounds_timeseries_")[-1][::-1] .split("_", 1)[-1][::-1] ) bold_to_T1w_mask = pe.Node( From a08f167fa85e5ca0734b97b3e3e7f484f7d0d920 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 13:48:14 -0400 Subject: [PATCH 303/507] =?UTF-8?q?:bug:=20"regressors"=20=E2=86=92=20"des?= =?UTF-8?q?c-confounds=5Ftimeseries"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/_entrypoints/run.py | 12 +----------- CPAC/func_preproc/func_motion.py | 4 +--- CPAC/pipeline/engine.py | 4 +++- CPAC/qc/pipeline.py | 20 +++++++++++++++++++- CPAC/qc/xcp.py | 24 ++++++++++++++++++++---- 5 files changed, 44 insertions(+), 20 deletions(-) diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index f84b6cf799..1a01489da8 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (C) 2018-2024 C-PAC Developers +# Copyright (C) 2018-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -729,16 +729,6 @@ def run_main(): args.fail_fast ) - if c["pipeline_setup"]["output_directory"]["quality_control"][ - "generate_xcpqc_files" - ]: - c["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_first"] = True - c["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_after"] = True - if args.participant_label: WFLOGGER.info( "#### Running C-PAC for %s", ", ".join(args.participant_label) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 51424ab601..b8974f915b 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -751,9 +751,7 @@ def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num): } -def motion_correct_connections( - wf, cfg, strat_pool, pipe_num, opt -): # -> tuple[Any, dict[str, tuple[Node, str]]]: +def motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt): """Check opt for valid option, then connect that option.""" motion_correct_options = valid_options["motion_correction"] if opt not in motion_correct_options: diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 11f5965e8e..2a1e13a8c8 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -14,6 +14,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""C-PAC pipeline engine.""" + import ast import copy import hashlib @@ -307,7 +309,7 @@ def regressor_dct(self, cfg) -> dict: "ingress_regressors." ) _nr = cfg["nuisance_corrections", "2-nuisance_regression"] - if not hasattr(self, "timeseries"): + if not hasattr(self, "desc-confounds_timeseries"): if _nr["Regressors"]: self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} else: diff --git a/CPAC/qc/pipeline.py b/CPAC/qc/pipeline.py index 15d6b35e09..2eb44ed4f2 100644 --- a/CPAC/qc/pipeline.py +++ b/CPAC/qc/pipeline.py @@ -1,3 +1,21 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""C-PAC quality control pipeline.""" + import pkg_resources as p from CPAC.pipeline import nipype_pipeline_engine as pe @@ -156,7 +174,7 @@ def qc_brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): @nodeblock( - name="qc_brain_extraction", + name="qc_T1w_standard", config=["pipeline_setup", "output_directory", "quality_control"], switch=["generate_quality_control_images"], inputs=["space-template_desc-preproc_T1w", "T1w-brain-template"], diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index 95cb870430..4a313b4719 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -1,3 +1,19 @@ +# Copyright (C) 2021-2025 C-PAC Developers + +# This file is part of C-PAC. 
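The `qc_file.inputs.regressors` assignment below leans on a reverse-split idiom: reverse the node name, split off one "_"-delimited token, reverse back, dropping the trailing suffix (the pipe number, from the look of the surrounding node names). Worked through on a hypothetical node name:

    name = "desc-confounds_timeseries_Regressor-A_0"      # hypothetical
    tail = name.split("desc-confounds_timeseries_")[-1]   # "Regressor-A_0"
    label = tail[::-1].split("_", 1)[-1][::-1]            # "Regressor-A"
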
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """ Generate XCP-stype quality control files. @@ -439,7 +455,7 @@ def get_entity_part(key): "space-bold_desc-brain_mask", ["T1w-brain-template-mask", "EPI-template-mask"], ["space-template_desc-bold_mask", "space-EPItemplate_desc-bold_mask"], - "regressors", + "desc-confounds_timeseries", ["T1w-brain-template-funcreg", "EPI-brain-template-funcreg"], [ "desc-movementParametersUnfiltered_motion", @@ -458,7 +474,7 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): # pylint: disable=invalid-name, unused-argument if cfg[ "nuisance_corrections", "2-nuisance_regression", "run" - ] and not strat_pool.check_rpool("regressors"): + ] and not strat_pool.check_rpool("desc-confounds_timeseries"): return wf, {} bids_info = pe.Node( Function( @@ -501,8 +517,8 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): ) qc_file.inputs.desc = "preproc" qc_file.inputs.regressors = ( - strat_pool.node_data("regressors") - .node.name.split("regressors_")[-1][::-1] + strat_pool.node_data("desc-confounds_timeseries") + .node.name.split("desc-confounds_timeseries_")[-1][::-1] .split("_", 1)[-1][::-1] ) bold_to_T1w_mask = pe.Node( From 5b994d2e22192ce38d3d7424f9e05ee6e4dcf9f6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 14:43:08 -0400 Subject: [PATCH 304/507] :recycle: Make datetime-subtraction-safe None values --- CPAC/utils/monitoring/draw_gantt_chart.py | 14 ++- CPAC/utils/monitoring/monitoring.py | 127 +++++++++++++++++++++- CPAC/utils/tests/test_utils.py | 34 ++++++ 3 files changed, 166 insertions(+), 9 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index 67f26693f9..25aaf15b61 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -23,7 +23,7 @@ # Prior to release 0.12, Nipype was licensed under a BSD license. -# Modifications Copyright (C) 2021-2023 C-PAC Developers +# Modifications Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. @@ -52,6 +52,8 @@ from nipype.utils.draw_gantt_chart import draw_lines, draw_resource_bar, log_to_dict +from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone + def create_event_dict(start_time, nodes_list): """ @@ -407,7 +409,11 @@ def generate_gantt_chart( # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] - duration = (last_node["finish"] - start_node["start"]).total_seconds() + try: + duration = (last_node["finish"] - start_node["start"]).total_seconds() + except TypeError: + # no duration + return # Get events based dictionary of node run stats events = create_event_dict(start_node["start"], nodes_list) @@ -656,12 +662,12 @@ def _timing_timestamp(node): msg = "No logged nodes have timing information." raise ProcessLookupError(msg) return { - k: ( + k: DatetimeWithSafeNone( datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f") if "." 
in v else datetime.fromisoformat(v) ) if (k in {"start", "finish"} and isinstance(v, str)) - else v + else DatetimeWithSafeNone(v) for k, v in node.items() } diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 950f419b95..380bb5961e 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -1,9 +1,29 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Monitoring utilities for C-PAC.""" + +from datetime import datetime, timedelta import glob import json import math import os import socketserver import threading +from typing import Optional, TypeAlias import networkx as nx from traits.trait_base import Undefined @@ -13,8 +33,103 @@ from .custom_logging import getLogger -# Log initial information from all the nodes +def _safe_none_diff( + self: "DatetimeWithSafeNone | _NoTime", other: "DatetimeWithSafeNone | _NoTime" +) -> datetime | timedelta: + """Subtract between a datetime or timedelta or None.""" + if isinstance(self, _NoTime): + return timedelta(0) + if isinstance(other, DatetimeWithSafeNone): + if isinstance(other, _NoTime): + return timedelta(0) + return self - other + if isinstance(other, (datetime, timedelta)): + return self._dt - other + msg = f"Cannot subtract {type(other)} from {type(self)}" + raise NotImplementedError(msg) + + +class _NoTime: + """A wrapper for None values that can be used in place of a datetime object.""" + + def __bool__(self) -> bool: + """Return False for _NoTime.""" + return False + + def __int__(self) -> int: + """Return 0 for _NoTime.""" + return 0 + + def __repr__(self) -> str: + """Return 'NoTime' for _NoTime.""" + return "NoTime" + + def __str__(self) -> str: + """Return 'NoTime' for _NoTime.""" + return "NoTime" + + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: + """Subtract between None and a datetime or timedelta or None.""" + return _safe_none_diff(self, other) + + +NoTime = _NoTime() +"""A singleton None that can be used in place of a datetime object.""" + + +class DatetimeWithSafeNone(datetime, _NoTime): + """Time class that can be None or a time value.""" + + def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": + """Create a new instance of the class.""" + if dt is None: + return NoTime + if isinstance(dt, datetime): + return datetime.__new__( + cls, + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + ) + if isinstance(dt, str): + try: + return DatetimeWithSafeNone(datetime.fromisoformat(dt)) + except (ValueError, TypeError): + error = f"Invalid ISO-format datetime string: {dt}" + else: + error = f"Cannot convert {type(dt)} to datetime" + raise TypeError(error) + + def __bool__(self) -> bool: + """Return True if not NoTime.""" + return self is not NoTime + + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> 
datetime | timedelta: + """Subtract between a datetime or timedelta or None.""" + return _safe_none_diff(self, other) + + def __repr__(self) -> str: + """Return the string representation of the datetime or NoTime.""" + if self: + return datetime.__repr__(self) + return "NoTime" + + def __str__(self) -> str: + """Return the string representation of the datetime or NoTime.""" + return super().__str__() + + +OptionalDatetime: TypeAlias = Optional[datetime | str | DatetimeWithSafeNone | _NoTime] +"""Type alias for a datetime, ISO-format string or None.""" + + def recurse_nodes(workflow, prefix=""): + """Log initial information from all the nodes.""" for node in nx.topological_sort(workflow._graph): if isinstance(node, pe.Workflow): for subnode in recurse_nodes(node, prefix + workflow.name + "."): @@ -111,8 +226,8 @@ def log_nodes_cb(node, status): status_dict = { "id": str(node), "hash": node.inputs.get_hashval()[1], - "start": getattr(runtime, "startTime", None), - "finish": getattr(runtime, "endTime", None), + "start": DatetimeWithSafeNone(getattr(runtime, "startTime", None)), + "finish": DatetimeWithSafeNone(getattr(runtime, "endTime", None)), "runtime_threads": runtime_threads, "runtime_memory_gb": getattr(runtime, "mem_peak_gb", "N/A"), "estimated_memory_gb": node.mem_gb, @@ -122,7 +237,9 @@ def log_nodes_cb(node, status): if hasattr(node, "input_data_shape") and node.input_data_shape is not Undefined: status_dict["input_data_shape"] = node.input_data_shape - if status_dict["start"] is None or status_dict["finish"] is None: + if any( + not isinstance(status_dict[label], datetime) for label in ["start", "finish"] + ): status_dict["error"] = True logger.debug(json.dumps(status_dict)) @@ -155,7 +272,7 @@ def handle(self): with open(callback_file, "rb") as lf: for l in lf.readlines(): # noqa: E741 - l = l.strip() # noqa: E741 + l = l.strip() # noqa: E741,PLW2901 try: node = json.loads(l) if node["id"] not in tree[subject]: diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index ab896c6029..8b4e494597 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -1,5 +1,22 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Tests of CPAC utility functions.""" +from datetime import datetime, timedelta import multiprocessing from unittest import mock @@ -10,6 +27,7 @@ from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.utils.configuration import Configuration from CPAC.utils.monitoring.custom_logging import log_subprocess +from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone, OptionalDatetime from CPAC.utils.tests import old_functions from CPAC.utils.utils import ( check_config_resources, @@ -168,3 +186,19 @@ def test_system_deps(): Raises an exception if dependencies are not met. 
""" check_system_deps(*([True] * 4)) + + +@pytest.mark.parametrize( + "t1", [datetime.now(), datetime.isoformat(datetime.now()), None] +) +@pytest.mark.parametrize( + "t2", [datetime.now(), datetime.isoformat(datetime.now()), None] +) +def test_datetime_with_safe_none(t1: OptionalDatetime, t2: OptionalDatetime): + """Test DatetimeWithSafeNone class works with datetime and None.""" + t1 = DatetimeWithSafeNone(t1) + t2 = DatetimeWithSafeNone(t2) + if t1 and t2: + assert isinstance(t2 - t1, timedelta) + else: + assert t2 - t1 == timedelta(0) From d5fe464fbf7ea20b02515ca5017c42c7f356c857 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 21 May 2025 15:14:29 -0400 Subject: [PATCH 305/507] :bug: Serialize datetime subclass --- CPAC/utils/monitoring/monitoring.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 380bb5961e..1378c77546 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -23,7 +23,7 @@ import os import socketserver import threading -from typing import Optional, TypeAlias +from typing import Any, Optional, TypeAlias import networkx as nx from traits.trait_base import Undefined @@ -124,6 +124,23 @@ def __str__(self) -> str: return super().__str__() +class DatetimeJSONEncoder(json.JSONEncoder): + """JSON encoder that handles DatetimeWithSafeNone instances.""" + + def default(self, o: Any) -> str: + """Convert datetime objects to ISO format.""" + if isinstance(o, datetime): + return o.isoformat() + if o is None or o is NoTime: + return "" + return super().default(o) + + +def json_dumps(obj: Any, **kwargs) -> str: + """Convert an object to a JSON string.""" + return json.dumps(obj, cls=DatetimeJSONEncoder, **kwargs) + + OptionalDatetime: TypeAlias = Optional[datetime | str | DatetimeWithSafeNone | _NoTime] """Type alias for a datetime, ISO-format string or None.""" @@ -144,7 +161,7 @@ def recurse_nodes(workflow, prefix=""): def log_nodes_initial(workflow): logger = getLogger("callback") for node in recurse_nodes(workflow): - logger.debug(json.dumps(node)) + logger.debug(json_dumps(node)) def log_nodes_cb(node, status): @@ -242,7 +259,7 @@ def log_nodes_cb(node, status): ): status_dict["error"] = True - logger.debug(json.dumps(status_dict)) + logger.debug(json_dumps(status_dict)) log_nodes_cb.__doc__ = f"""{_nipype_log_nodes_cb.__doc__} @@ -299,7 +316,7 @@ def handle(self): tree = {s: t for s, t in tree.items() if t} headers = "HTTP/1.1 200 OK\nConnection: close\n\n" - self.request.sendall(headers + json.dumps(tree) + "\n") + self.request.sendall(headers + json_dumps(tree) + "\n") class LoggingHTTPServer(socketserver.ThreadingTCPServer, object): From 5054ffa531b29020fc47f1e78a19cdb289c1d938 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 5 Feb 2025 13:26:11 -0500 Subject: [PATCH 306/507] =?UTF-8?q?:art:=20Combine=20get=5Fcpac=5Fprovenan?= =?UTF-8?q?ce=20+=20check=5Fprov=5Ffor=5Fregtool=20=E2=86=92=20reg=5Ftool?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/alff/alff.py | 8 +-- CPAC/func_preproc/func_motion.py | 4 +- .../longitudinal_workflow.py | 12 ++-- CPAC/nuisance/nuisance.py | 19 ++--- CPAC/pipeline/engine.py | 24 ++++++- CPAC/pipeline/nodeblock.py | 12 +++- CPAC/registration/registration.py | 70 +++++++------------ CPAC/seg_preproc/seg_preproc.py | 16 ++--- CPAC/utils/utils.py | 20 +----- CPAC/vmhc/vmhc.py | 22 +++++- 10 files changed, 97 insertions(+), 110 
deletions(-) diff --git a/CPAC/alff/alff.py b/CPAC/alff/alff.py index f8bfc1a0b8..a66791b83c 100644 --- a/CPAC/alff/alff.py +++ b/CPAC/alff/alff.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -25,7 +25,6 @@ from CPAC.pipeline.nodeblock import nodeblock from CPAC.registration.registration import apply_transform from CPAC.utils.interfaces import Function -from CPAC.utils.utils import check_prov_for_regtool def create_alff(wf_name="alff_workflow"): @@ -320,10 +319,7 @@ def alff_falff(wf, cfg, strat_pool, pipe_num, opt=None): def alff_falff_space_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs = {} if strat_pool.check_rpool("desc-denoisedNofilt_bold"): - xfm_prov = strat_pool.get_cpac_provenance( - "from-bold_to-template_mode-image_xfm" - ) - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index b8974f915b..885e6a312b 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -36,7 +36,6 @@ from CPAC.pipeline.schema import valid_options from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces.function import Function -from CPAC.utils.utils import check_prov_for_motion_tool if TYPE_CHECKING: from CPAC.pipeline.engine import ResourcePool @@ -73,8 +72,7 @@ ) def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None): """Calculate motion statistics for motion parameters.""" - motion_prov = strat_pool.get_cpac_provenance("desc-movementParameters_motion") - motion_correct_tool = check_prov_for_motion_tool(motion_prov) + motion_correct_tool = strat_pool.motion_tool("desc-movementParameters_motion") coordinate_transformation = [ "filtered-coordinate-transformation", "coordinate-transformation", diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 5c989675c1..1ab06dceab 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2020-2024 C-PAC Developers +# Copyright (C) 2020-2025 C-PAC Developers # This file is part of C-PAC. 
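The shape of this patch's refactor, side by side (resource key borrowed from the alff hunk above; `strat_pool` assumed in scope, as inside a nodeblock):

    from CPAC.utils.utils import check_prov_for_regtool

    # before: fetch provenance, then inspect it with a separate helper
    xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm")
    reg_tool = check_prov_for_regtool(xfm_prov)

    # after: one ResourcePool method, returning "ants", "fsl", or None
    reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm")
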
@@ -44,7 +44,7 @@ from CPAC.utils.interfaces.datasink import DataSink from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy -from CPAC.utils.utils import check_config_resources, check_prov_for_regtool +from CPAC.utils.utils import check_config_resources @nodeblock( @@ -254,10 +254,7 @@ def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-template_desc-brain_T1w"], ) def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - "from-longitudinal_to-template_mode-image_xfm" - ) - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-longitudinal_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] @@ -325,10 +322,9 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( + reg_tool = strat_pool.reg_tool( "from-longitudinal_to-T1w_mode-image_desc-linear_xfm" ) - reg_tool = check_prov_for_regtool(xfm_prov) num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index d7b78fc6e6..80ad38aaf4 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -53,7 +53,6 @@ from CPAC.utils.interfaces.function import Function from CPAC.utils.interfaces.pc import PC from CPAC.utils.monitoring import IFLOGGER -from CPAC.utils.utils import check_prov_for_regtool from .bandpass import afni_1dBandpass, bandpass_voxels @@ -2014,8 +2013,7 @@ def filtering_bold_and_regressors( outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) def ICA_AROMA_FSLreg(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") if reg_tool != "fsl": return (wf, None) @@ -2061,8 +2059,7 @@ def ICA_AROMA_FSLreg(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) def ICA_AROMA_ANTsreg(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm") if reg_tool != "ants": return (wf, None) @@ -2132,8 +2129,7 @@ def ICA_AROMA_ANTsreg(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) def ICA_AROMA_FSLEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-EPItemplate_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-EPItemplate_mode-image_xfm") if reg_tool != "fsl": return (wf, None) @@ -2185,8 +2181,7 @@ def ICA_AROMA_FSLEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) def ICA_AROMA_ANTsEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-EPItemplate_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-EPItemplate_mode-image_xfm") if reg_tool != "ants": return (wf, None) @@ -2516,15 +2511,13 @@ def nuisance_regressors_generation( if space == "T1w": 
prefixes[0] = "" if strat_pool.check_rpool("from-template_to-T1w_mode-image_desc-linear_xfm"): - xfm_prov = strat_pool.get_cpac_provenance( + reg_tool = strat_pool.reg_tool( "from-template_to-T1w_mode-image_desc-linear_xfm" ) - reg_tool = check_prov_for_regtool(xfm_prov) elif space == "bold": - xfm_prov = strat_pool.get_cpac_provenance( + reg_tool = strat_pool.reg_tool( "from-EPItemplate_to-bold_mode-image_desc-linear_xfm" ) - reg_tool = check_prov_for_regtool(xfm_prov) if reg_tool is not None: use_ants = reg_tool == "ants" else: diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 2a1e13a8c8..813cc2c457 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -24,7 +24,7 @@ import json import os import re -from typing import Optional +from typing import Literal, Optional import warnings import pandas as pd @@ -510,6 +510,28 @@ def get_cpac_provenance(self, resource, strat=None): json_data = self.get_json(resource, strat) return json_data["CpacProvenance"] + def motion_tool( + self, resource, strat=None + ) -> Optional[Literal["3dvolreg", "mcflirt"]]: + """Check provenance for motion correction tool.""" + prov = self.get_cpac_provenance(resource, strat) + last_entry = get_last_prov_entry(prov) + last_node = last_entry.split(":")[1] + if "3dvolreg" in last_node.lower(): + return "3dvolreg" + if "mcflirt" in last_node.lower(): + return "mcflirt" + # check entire prov + if "3dvolreg" in str(prov): + return "3dvolreg" + if "mcflirt" in str(prov): + return "mcflirt" + return None + + def reg_tool(self, resource, strat=None) -> Optional[Literal["ants", "fsl"]]: + """Check provenance for registration tool.""" + return check_prov_for_regtool(self.get_cpac_provenance(resource, strat)) + @staticmethod def generate_prov_string(prov): # this will generate a string from a SINGLE RESOURCE'S dictionary of diff --git a/CPAC/pipeline/nodeblock.py b/CPAC/pipeline/nodeblock.py index 53b9db1330..e3cda3d8bd 100644 --- a/CPAC/pipeline/nodeblock.py +++ b/CPAC/pipeline/nodeblock.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023-2024 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. @@ -16,7 +16,13 @@ # License along with C-PAC. If not, see . """Class and decorator for NodeBlock functions.""" -from typing import Any, Callable, Optional +from typing import Any, Callable, Optional, TypeAlias + +from nipype.pipeline import engine as pe + +NODEBLOCK_RETURN: TypeAlias = tuple[ + pe.Workflow, dict[str, tuple[pe.Node | pe.Workflow, str]] +] class NodeBlockFunction: @@ -77,9 +83,9 @@ def __init__( ] ).rstrip() - # all node block functions have this signature def __call__(self, wf, cfg, strat_pool, pipe_num, opt=None): """ + All node block functions have this signature. 
Parameters ---------- diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index fa2c6e7457..2d853615e7 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -17,7 +17,7 @@ # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order """Workflows for registration.""" -from typing import Literal, Optional +from typing import Literal, Optional, TYPE_CHECKING from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util @@ -27,7 +27,7 @@ from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN from CPAC.registration.utils import ( change_itk_transform_type, check_transforms, @@ -40,10 +40,12 @@ seperate_warps_list, single_ants_xfm_to_list, ) -from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge -from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool + +if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool + from CPAC.utils.configuration import Configuration def apply_transform( @@ -1461,7 +1463,7 @@ def FSL_registration_connector( opt: Literal["FSL", "FSL-linear"] = "FSL", symmetric: bool = False, template: str = "T1w", -) -> tuple[pe.Workflow, dict[str, tuple]]: +) -> NODEBLOCK_RETURN: """Transform raw data to template with FSL.""" assert opt in ["FSL", "FSL-linear"] wf = pe.Workflow(name=wf_name) @@ -2302,17 +2304,15 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, fsl, "inputspec.reference_mask") if "space-longitudinal" in brain: - for key in outputs.keys(): + for key in list(outputs.keys()): if "from-T1w" in key: new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] if "to-T1w" in key: new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] - return (wf, outputs) + return wf, outputs @nodeblock( @@ -2396,7 +2396,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N wf.connect(node, out, fsl, "inputspec.reference_mask") if "space-longitudinal" in brain: - for key in outputs.keys(): + for key in list(outputs.keys()): if "from-T1w" in key: new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] @@ -2629,16 +2629,15 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, ants_rc, "inputspec.lesion_mask") if "space-longitudinal" in brain: - for key in outputs: + for key in list(outputs.keys()): for direction in ["from", "to"]: if f"{direction}-T1w" in key: new_key = key.replace( f"{direction}-T1w", f"{direction}-longitudinal" ) outputs[new_key] = outputs[key] - del outputs[key] - return (wf, outputs) + return wf, outputs @nodeblock( @@ -2752,15 +2751,13 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= wf.connect(node, out, ants, "inputspec.lesion_mask") if "space-longitudinal" in brain: - for key in outputs.keys(): + for key in list(outputs.keys()): if "from-T1w" in key: new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] if "to-T1w" in key: new_key = 
key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] return (wf, outputs) @@ -2875,9 +2872,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Overwrite ANTs transforms with FSL transforms.""" - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") if opt.lower() == "fsl" and reg_tool.lower() == "ants": # Apply head-to-head transforms on brain using ABCD-style registration @@ -3504,8 +3499,7 @@ def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): Condense the BOLD-to-T1 coregistration transform and the T1-to-template transform into one transform matrix. """ - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") xfm, outputs = bold_to_T1template_xfm_connector( f"create_func_to_T1wtemplate_xfm_{pipe_num}", cfg, reg_tool, symmetric=False @@ -3583,8 +3577,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N Condense the BOLD-to-T1 coregistration transform and the T1-to-symmetric-template transform into one transform matrix. """ - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-symtemplate_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-symtemplate_mode-image_xfm") xfm, outputs = bold_to_T1template_xfm_connector( f"create_func_to_T1wsymtemplate_xfm_{pipe_num}", @@ -3785,12 +3778,11 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= "fsl-blip-warp", ) ], - outputs=["desc-preproc_bold", "desc-reorient_bold", "desc-stc_bold"], + outputs=["desc-preproc_bold", "desc-stc_bold", "desc-reorient_bold"], ) def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): """Apply blip to timeseries.""" - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm") outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if strat_pool.check_rpool("ants-blip-warp"): @@ -3867,8 +3859,7 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) ) def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 head to template.""" - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] @@ -3921,8 +3912,7 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 mask to template.""" - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] @@ -3982,8 +3972,7 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def 
warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp timeseries to T1 template.""" - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] @@ -4045,8 +4034,7 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): ) def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None): """Warp timeseries to T1 template at derivative resolution.""" - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] @@ -4888,8 +4876,7 @@ def single_step_resample_timeseries_to_T1template( # OF THE POSSIBILITY OF SUCH DAMAGE. # Modifications copyright (C) 2021 - 2024 C-PAC Developers - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") bbr2itk = pe.Node( Function( @@ -4947,9 +4934,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "source_file") node, out = strat_pool.get_data("coordinate-transformation") - motion_correct_tool = check_prov_for_motion_tool( - strat_pool.get_cpac_provenance("coordinate-transformation") - ) + motion_correct_tool = strat_pool.motion_correct_tool("coordinate-transformation") if motion_correct_tool == "mcflirt": wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": @@ -5488,8 +5473,8 @@ def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_spa def warp_resource_to_template( wf: pe.Workflow, - cfg, - strat_pool, + cfg: "Configuration", + strat_pool: "ResourcePool", pipe_num: int, input_resource: list[str] | str, xfm: str, @@ -5537,8 +5522,7 @@ def warp_resource_to_template( if template_space == "": template_space = "T1w" # determine tool used for registration - xfm_prov = strat_pool.get_cpac_provenance(xfm) - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool(xfm) # set 'resource' if strat_pool.check_rpool(input_resource): resource, input_resource = strat_pool.get_data( diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index f769cf14b3..69db6d60cb 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. 
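# Across registration.py, seg_preproc.py, and vmhc.py, the two-step lookup --
# fetch the CpacProvenance, then call check_prov_for_regtool() on it -- collapses
# into a single strat_pool.reg_tool(xfm) call. A minimal sketch of what that
# ResourcePool method plausibly wraps, reusing the signature shown in the
# engine.py hunk further below; the one-line body is an assumption, not the
# committed implementation:
from typing import Literal, Optional

from CPAC.utils.utils import check_prov_for_regtool


def reg_tool(self, resource, strat=None) -> Optional[Literal["ants", "fsl"]]:
    """Check provenance for the registration tool that produced `resource`."""
    prov = self.get_cpac_provenance(resource, strat)  # provenance list for this resource
    return check_prov_for_regtool(prov)  # "ants", "fsl", or None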
@@ -35,7 +35,6 @@ from CPAC.utils.interfaces.function.seg_preproc import ( pick_tissue_from_labels_file_interface, ) -from CPAC.utils.utils import check_prov_for_regtool def process_segment_map(wf_name, use_priors, use_custom_threshold, reg_tool): @@ -536,7 +535,6 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): # triggered by 'segments' boolean input (-g or --segments) # 'probability_maps' output is a list of individual probability maps # triggered by 'probability_maps' boolean input (-p) - segment = pe.Node( interface=fsl.FAST(), name=f"segment_{pipe_num}", @@ -596,10 +594,8 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): xfm = "from-template_to-T1w_mode-image_desc-linear_xfm" if "space-longitudinal" in resource: xfm = "from-template_to-longitudinal_mode-image_desc-linear_xfm" - xfm_prov = strat_pool.get_cpac_provenance(xfm) - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool(xfm) else: - xfm_prov = None reg_tool = None xfm = None @@ -752,10 +748,7 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["label-CSF_mask", "label-GM_mask", "label-WM_mask"], ) def tissue_seg_T1_template_based(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - "from-template_to-T1w_mode-image_desc-linear_xfm" - ) - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-template_to-T1w_mode-image_desc-linear_xfm") use_ants = reg_tool == "ants" csf_template2t1 = tissue_mask_template_to_t1(f"CSF_{pipe_num}", use_ants) @@ -806,10 +799,9 @@ def tissue_seg_T1_template_based(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def tissue_seg_EPI_template_based(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( + reg_tool = strat_pool.reg_tool( "from-EPItemplate_to-bold_mode-image_desc-linear_xfm" ) - reg_tool = check_prov_for_regtool(xfm_prov) use_ants = reg_tool == "ants" csf_template2t1 = tissue_mask_template_to_t1("CSF", use_ants) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 6ca7e25689..ea7b1c2c5c 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -73,7 +73,7 @@ def get_last_prov_entry(prov): return prov[-1] -def check_prov_for_regtool(prov): +def check_prov_for_regtool(prov) -> Optional[Literal["ants", "fsl"]]: """Check provenance for registration tool.""" last_entry = get_last_prov_entry(prov) last_node = last_entry.split(":")[1] @@ -101,22 +101,6 @@ def check_prov_for_regtool(prov): return None -def check_prov_for_motion_tool(prov): - """Check provenance for motion correction tool.""" - last_entry = get_last_prov_entry(prov) - last_node = last_entry.split(":")[1] - if "3dvolreg" in last_node.lower(): - return "3dvolreg" - if "mcflirt" in last_node.lower(): - return "mcflirt" - # check entire prov - if "3dvolreg" in str(prov): - return "3dvolreg" - if "mcflirt" in str(prov): - return "mcflirt" - return None - - def _get_flag(in_flag): return in_flag diff --git a/CPAC/vmhc/vmhc.py b/CPAC/vmhc/vmhc.py index 3c547a8e2f..ddb2f57c60 100644 --- a/CPAC/vmhc/vmhc.py +++ b/CPAC/vmhc/vmhc.py @@ -1,3 +1,21 @@ +# Copyright (C) 2012-2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Voxel-Mirrored Homotopic Connectivity.""" + from nipype.interfaces import fsl from nipype.interfaces.afni import preprocess @@ -5,7 +23,6 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.registration.registration import apply_transform -from CPAC.utils.utils import check_prov_for_regtool from CPAC.vmhc import * from .utils import * @@ -60,8 +77,7 @@ def smooth_func_vmhc(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-symtemplate_desc-sm_bold"], ) def warp_timeseries_to_sym_template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-symtemplate_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + reg_tool = strat_pool.reg_tool("from-bold_to-symtemplate_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] From 53a5de15d7fc3b28cb696d6303236d7be4b2b97d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 22 May 2025 17:04:26 -0400 Subject: [PATCH 307/507] :art: Add type-checking to func_motion --- CPAC/func_preproc/func_motion.py | 80 +++++++++++++++++----- CPAC/pipeline/engine.py | 11 +-- CPAC/pipeline/nodeblock.py | 7 +- CPAC/pipeline/schema.py | 19 ++++- CPAC/pipeline/utils.py | 15 ++-- CPAC/utils/interfaces/function/function.py | 5 +- 6 files changed, 97 insertions(+), 40 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 885e6a312b..b82126c8a1 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -20,6 +20,7 @@ from nipype.interfaces import afni, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils +from nipype.pipeline.engine import Workflow from CPAC.func_preproc.utils import ( chunk_ts, @@ -32,13 +33,14 @@ motion_power_statistics, ) from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN from CPAC.pipeline.schema import valid_options from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces.function import Function if TYPE_CHECKING: from CPAC.pipeline.engine import ResourcePool + from CPAC.pipeline.schema import MotionEstimateFilter @nodeblock( @@ -70,7 +72,13 @@ "desc-summary_motion", ], ) -def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None): +def calc_motion_stats( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: None = None, +) -> NODEBLOCK_RETURN: """Calculate motion statistics for motion parameters.""" motion_correct_tool = strat_pool.motion_tool("desc-movementParameters_motion") coordinate_transformation = [ @@ -132,7 +140,7 @@ def calc_motion_stats(wf, cfg, strat_pool, pipe_num, opt=None): return wf, outputs -def estimate_reference_image(in_file): +def estimate_reference_image(in_file: str) -> str: 
"""fMRIPrep-style BOLD reference. Generate a reference 3D map from BOLD and SBRef EPI images for BOLD datasets. @@ -198,7 +206,7 @@ def estimate_reference_image(in_file): # See the License for the specific language governing permissions and # limitations under the License. - # Modifications copyright (C) 2021 - 2024 C-PAC Developers + # Modifications copyright (C) 2021 - 2025 C-PAC Developers import os import numpy as np @@ -283,10 +291,17 @@ def estimate_reference_image(in_file): "using", ], option_val=["3dvolreg", "mcflirt"], - inputs=[("desc-preproc_bold", "motion-basefile")], + inputs=[("desc-preproc_bold", "motion-basefile", *_MOTION_PARAM_OUTPUTS)], outputs={**_MOTION_CORRECTED_OUTPUTS, **_MOTION_PARAM_OUTPUTS}, ) -def func_motion_correct(wf, cfg, strat_pool, pipe_num, opt=None): +def func_motion_correct( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: Literal["3dvolreg", "mcflirt"], +) -> NODEBLOCK_RETURN: + """Perform motion estimation and correction using 3dVolReg or MCFLIRT.""" wf, outputs = motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt) return wf, outputs @@ -305,7 +320,14 @@ def func_motion_correct(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[("desc-preproc_bold", "motion-basefile")], outputs=_MOTION_CORRECTED_OUTPUTS, ) -def func_motion_correct_only(wf, cfg, strat_pool, pipe_num, opt=None): +def func_motion_correct_only( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: Literal["3dvolreg", "mcflirt"], +) -> NODEBLOCK_RETURN: + """Perform motion correction without estimating motion parameters.""" wf, wf_outputs = motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt) outputs = { @@ -329,7 +351,13 @@ def func_motion_correct_only(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[("desc-preproc_bold", "motion-basefile")], outputs=_MOTION_PARAM_OUTPUTS, ) -def func_motion_estimates(wf, cfg, strat_pool, pipe_num, opt=None): +def func_motion_estimates( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: Literal["3dvolreg", "mcflirt"], +) -> NODEBLOCK_RETURN: """Calculate motion estimates using 3dVolReg or MCFLIRT.""" from CPAC.pipeline.utils import present_outputs @@ -349,7 +377,9 @@ def func_motion_estimates(wf, cfg, strat_pool, pipe_num, opt=None): ) -def get_mcflirt_rms_abs(rms_files): +def get_mcflirt_rms_abs(rms_files: list[str]) -> tuple[str, str]: + """Split the RMS files into absolute and relative.""" + abs_file = rels_file = "not found" for path in rms_files: if "abs.rms" in path: abs_file = path @@ -372,12 +402,12 @@ def get_mcflirt_rms_abs(rms_files): outputs=["motion-basefile"], ) def get_motion_ref( - wf: pe.Workflow, + wf: Workflow, cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, opt: Literal["mean", "median", "selected_volume"], -) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: +) -> NODEBLOCK_RETURN: """Get the reference image for motion correction.""" node, out = strat_pool.get_data("desc-preproc_bold") in_label = "in_file" @@ -435,12 +465,12 @@ def get_motion_ref( outputs=["motion-basefile"], ) def get_motion_ref_fmriprep( - wf: pe.Workflow, + wf: Workflow, cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, opt: Literal["fmriprep_reference"], -) -> tuple[pe.Workflow, dict[str, tuple[pe.Node, str]]]: +) -> NODEBLOCK_RETURN: """Get the fMRIPrep-style reference image for motion correction.""" assert opt == "fmriprep_reference" func_get_RPI = pe.Node( @@ -460,7 +490,7 @@ def 
get_motion_ref_fmriprep( return wf, outputs -def motion_correct_3dvolreg(wf, cfg, strat_pool, pipe_num): +def motion_correct_3dvolreg(wf, cfg, strat_pool, pipe_num) -> NODEBLOCK_RETURN: """Calculate motion parameters with 3dvolreg.""" if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: chunk_imports = ["import nibabel as nib"] @@ -690,7 +720,7 @@ def motion_correct_3dvolreg(wf, cfg, strat_pool, pipe_num): return wf, outputs -def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num): +def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num) -> NODEBLOCK_RETURN: """Calculate motion parameters with MCFLIRT.""" func_motion_correct_A = pe.Node( interface=fsl.MCFLIRT(save_mats=True, save_plots=True), @@ -749,7 +779,13 @@ def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num): } -def motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt): +def motion_correct_connections( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: Literal["3dvolreg", "mcflirt"], +) -> NODEBLOCK_RETURN: """Check opt for valid option, then connect that option.""" motion_correct_options = valid_options["motion_correction"] if opt not in motion_correct_options: @@ -807,7 +843,13 @@ def motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt): "motion-filter-plot": {}, }, ) -def motion_estimate_filter(wf, cfg, strat_pool, pipe_num, opt=None): +def motion_estimate_filter( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + opt: MotionEstimateFilter, +) -> NODEBLOCK_RETURN: """Filter motion parameters. .. versionchanged:: 1.8.6 @@ -889,10 +931,10 @@ def motion_estimate_filter(wf, cfg, strat_pool, pipe_num, opt=None): movement_parameters.out, ) - return (wf, outputs) + return wf, outputs -def normalize_motion_parameters(in_file): +def normalize_motion_parameters(in_file: str) -> str: """Convert FSL mcflirt motion parameters to AFNI space.""" import os diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 813cc2c457..bce404246a 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -510,9 +510,7 @@ def get_cpac_provenance(self, resource, strat=None): json_data = self.get_json(resource, strat) return json_data["CpacProvenance"] - def motion_tool( - self, resource, strat=None - ) -> Optional[Literal["3dvolreg", "mcflirt"]]: + def motion_tool(self, resource: str, strat=None) -> Literal["3dvolreg", "mcflirt"]: """Check provenance for motion correction tool.""" prov = self.get_cpac_provenance(resource, strat) last_entry = get_last_prov_entry(prov) @@ -526,7 +524,12 @@ def motion_tool( return "3dvolreg" if "mcflirt" in str(prov): return "mcflirt" - return None + msg = ( + "\n[!] Developer info: the motion correction " + f"tool for {resource} is not in the " + "CpacProvenance.\n" + ) + raise LookupError(msg) def reg_tool(self, resource, strat=None) -> Optional[Literal["ants", "fsl"]]: """Check provenance for registration tool.""" diff --git a/CPAC/pipeline/nodeblock.py b/CPAC/pipeline/nodeblock.py index e3cda3d8bd..ff96314d2c 100644 --- a/CPAC/pipeline/nodeblock.py +++ b/CPAC/pipeline/nodeblock.py @@ -16,13 +16,12 @@ # License along with C-PAC. If not, see . 
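# With motion_tool() above now raising LookupError instead of returning None,
# callers get a definite "3dvolreg"/"mcflirt" answer or a loud failure. A hedged
# sketch of a caller that still wants the old soft behavior; the helper name
# motion_tool_or_none is illustrative and not part of this patch:
from typing import Literal, Optional


def motion_tool_or_none(
    strat_pool, resource: str
) -> Optional[Literal["3dvolreg", "mcflirt"]]:
    """Fall back to None when provenance records no motion-correction tool."""
    try:
        return strat_pool.motion_tool(resource)
    except LookupError:
        # raised when neither tool appears in the resource's CpacProvenance
        return None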
"""Class and decorator for NodeBlock functions.""" -from typing import Any, Callable, Optional, TypeAlias +from typing import Any, Callable, Mapping, Optional, TypeAlias from nipype.pipeline import engine as pe -NODEBLOCK_RETURN: TypeAlias = tuple[ - pe.Workflow, dict[str, tuple[pe.Node | pe.Workflow, str]] -] +POOL_RESOURCE: TypeAlias = Mapping[str, tuple[pe.Node | pe.Workflow, str]] +NODEBLOCK_RETURN: TypeAlias = tuple[pe.Workflow, POOL_RESOURCE] class NodeBlockFunction: diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 940086c153..b06a597817 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2024 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -21,7 +21,7 @@ from itertools import chain, permutations import re from subprocess import CalledProcessError -from typing import Literal, TypeAlias +from typing import Literal, Optional as OptionalType, TypeAlias, TypedDict import numpy as np from pathvalidate import sanitize_filename @@ -273,6 +273,21 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name dict, # TODO: specify other valid ANTs parameters ) ] + + +class MotionEstimateFilter(TypedDict): + """Type for motion estimate filter.""" + + filter_type: Literal["notch", "lowpass"] + filter_order: int + breathing_rate_min: OptionalType[float] + breathing_rate_max: OptionalType[float] + center_frequency: OptionalType[float] + filter_bandwidth: OptionalType[float] + lowpass_cutoff: OptionalType[float] + Name: OptionalType[str] + + motion_estimate_filter = Any( { # notch filter with breathing_rate_* set Required("filter_type"): "notch", diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index d135addc41..0c4605e460 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2024 C-PAC Developers +# Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. @@ -19,11 +19,14 @@ from itertools import chain import os import subprocess -from typing import Optional +from typing import Optional, TYPE_CHECKING from CPAC.func_preproc.func_motion import motion_estimate_filter from CPAC.utils.bids_utils import insert_entity +if TYPE_CHECKING: + from CPAC.pipeline.nodeblock import POOL_RESOURCE + MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs @@ -108,7 +111,7 @@ def name_fork(resource_idx, cfg, json_info, out_dct): return resource_idx, out_dct -def present_outputs(outputs: dict, keys: list) -> dict: +def present_outputs(outputs: "POOL_RESOURCE", keys: list[str]) -> "POOL_RESOURCE": """ Return the subset of ``outputs`` including only that are present in ``keys``. @@ -122,12 +125,6 @@ def present_outputs(outputs: dict, keys: list) -> dict: NodeBlocks that differ only by configuration options and relevant output keys. 
- Parameters - ---------- - outputs : dict - - keys : list of str - Returns ------- dict diff --git a/CPAC/utils/interfaces/function/function.py b/CPAC/utils/interfaces/function/function.py index 34d01373d5..2e8c764242 100644 --- a/CPAC/utils/interfaces/function/function.py +++ b/CPAC/utils/interfaces/function/function.py @@ -5,6 +5,7 @@ # * Adds `as_module` argument and property # * Adds `sig_imports` decorator # * Automatically imports global Nipype loggers in function nodes +# * Specify type of `output_names` as `str | list[str]` instead of `str` # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -23,7 +24,7 @@ # Prior to release 0.12, Nipype was licensed under a BSD license. -# Modifications Copyright (C) 2018-2024 C-PAC Developers +# Modifications Copyright (C) 2018-2025 C-PAC Developers # This file is part of C-PAC. @@ -157,7 +158,7 @@ class Function(NipypeFunction): def __init__( self, input_names=None, - output_names="out", + output_names: str | list[str] = "out", function=None, imports=None, as_module=False, From 741b06f33b46fae343742042b7f10486f0115393 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 23 May 2025 23:56:50 -0400 Subject: [PATCH 308/507] :recycle: Refactor motion connections --- CPAC/func_preproc/__init__.py | 9 +- CPAC/func_preproc/func_motion.py | 638 ++++++++++-------- .../tests/test_preproc_connections.py | 87 +-- CPAC/pipeline/cpac_pipeline.py | 69 +- CPAC/pipeline/engine.py | 3 + CPAC/pipeline/nodeblock.py | 5 +- CPAC/pipeline/schema.py | 1 + CPAC/pipeline/utils.py | 6 +- CPAC/registration/registration.py | 2 +- 9 files changed, 400 insertions(+), 420 deletions(-) diff --git a/CPAC/func_preproc/__init__.py b/CPAC/func_preproc/__init__.py index 06837c024a..646c497285 100644 --- a/CPAC/func_preproc/__init__.py +++ b/CPAC/func_preproc/__init__.py @@ -19,23 +19,20 @@ from .func_motion import ( calc_motion_stats, func_motion_correct, - func_motion_correct_only, func_motion_estimates, - get_motion_ref, - get_motion_ref_fmriprep, + get_motion_refs, motion_estimate_filter, + stack_motion_blocks, ) from .func_preproc import get_idx, slice_timing_wf -get_motion_refs = [get_motion_ref, get_motion_ref_fmriprep] - __all__ = [ "calc_motion_stats", "func_motion_correct", - "func_motion_correct_only", "func_motion_estimates", "get_idx", "get_motion_refs", "motion_estimate_filter", "slice_timing_wf", + "stack_motion_blocks", ] diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index b82126c8a1..31c1ff28dd 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
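# The present_outputs() docstring above still specifies the behavior in full:
# keep only the resources a given nodeblock variant actually produced. A
# plausible one-comprehension sketch consistent with that contract (the
# committed body is not shown in this diff):
def present_outputs(outputs, keys):
    """Return the subset of `outputs` whose resource names appear in `keys`."""
    # resources requested in `keys` but absent from `outputs` are skipped
    return {key: outputs[key] for key in keys if key in outputs}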
"""Functions for calculating motion parameters.""" -from typing import Literal, TYPE_CHECKING +from typing import cast, Literal, TYPE_CHECKING from nipype.interfaces import afni, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils @@ -33,8 +33,13 @@ motion_power_statistics, ) from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN -from CPAC.pipeline.schema import valid_options +from CPAC.pipeline.nodeblock import ( + nodeblock, + NODEBLOCK_RETURN, + NodeBlockFunction, + POOL_RESOURCE_DICT, +) +from CPAC.pipeline.schema import MotionCorrection, valid_options from CPAC.utils.configuration import Configuration from CPAC.utils.interfaces.function import Function @@ -51,6 +56,8 @@ ], inputs=[ ( + "motion-correct-3dvolreg", + "motion-correct-mcflirt", "desc-preproc_bold", "space-bold_desc-brain_mask", "desc-movementParameters_motion", @@ -255,13 +262,15 @@ def estimate_reference_image(in_file: str) -> str: return out_file -_MOTION_CORRECTED_OUTPUTS = { +_MOTION_CORRECTED_OUTPUTS: dict[str, dict[str, str]] = { "desc-preproc_bold": {"Description": "Motion-corrected BOLD time-series."}, "desc-motion_bold": {"Description": "Motion-corrected BOLD time-series."}, } # the "filtered" outputs here are just for maintaining expecting # forking and connections and will not be output -_MOTION_PARAM_OUTPUTS = { +_MOTION_PARAM_OUTPUTS: dict[str, dict[str, str]] = { + "motion-correct-3dvolreg": {"Description": "Motion correction via 3dVolReg"}, + "motion-correct-mcflirt": {"Description": "Motion correction via MCFLIRT"}, "max-displacement": {}, "rels-displacement": {}, "desc-movementParameters_motion": { @@ -291,53 +300,35 @@ def estimate_reference_image(in_file: str) -> str: "using", ], option_val=["3dvolreg", "mcflirt"], - inputs=[("desc-preproc_bold", "motion-basefile", *_MOTION_PARAM_OUTPUTS)], - outputs={**_MOTION_CORRECTED_OUTPUTS, **_MOTION_PARAM_OUTPUTS}, + inputs=[ + ( + "motion-correct-3dvolreg", + "motion-correct-mcflirt", + "desc-preproc_bold", + "motion-basefile", + *_MOTION_PARAM_OUTPUTS, + ) + ], + outputs={ + **_MOTION_CORRECTED_OUTPUTS, + **_MOTION_PARAM_OUTPUTS, + }, ) def func_motion_correct( wf: Workflow, cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, - opt: Literal["3dvolreg", "mcflirt"], + opt: MotionCorrection, ) -> NODEBLOCK_RETURN: """Perform motion estimation and correction using 3dVolReg or MCFLIRT.""" - wf, outputs = motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt) + wf, outputs = motion_correct_connections( + wf, cfg, strat_pool, pipe_num, opt, estimate=False, correct=True + ) return wf, outputs -@nodeblock( - name="motion_correction_only", - switch=["functional_preproc", "motion_estimates_and_correction", "run"], - option_key=[ - "functional_preproc", - "motion_estimates_and_correction", - "motion_correction", - "using", - ], - option_val=["3dvolreg", "mcflirt"], - inputs=[("desc-preproc_bold", "motion-basefile")], - outputs=_MOTION_CORRECTED_OUTPUTS, -) -def func_motion_correct_only( - wf: Workflow, - cfg: Configuration, - strat_pool: "ResourcePool", - pipe_num: int, - opt: Literal["3dvolreg", "mcflirt"], -) -> NODEBLOCK_RETURN: - """Perform motion correction without estimating motion parameters.""" - wf, wf_outputs = motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt) - - outputs = { - "desc-preproc_bold": wf_outputs["desc-motion_bold"], - "desc-motion_bold": wf_outputs["desc-motion_bold"], - } - - return (wf, outputs) - - @nodeblock( 
name="motion_estimates", switch=["functional_preproc", "motion_estimates_and_correction", "run"], @@ -348,7 +339,14 @@ def func_motion_correct_only( "using", ], option_val=["3dvolreg", "mcflirt"], - inputs=[("desc-preproc_bold", "motion-basefile")], + inputs=[ + ( + "desc-preproc_bold", + "motion-basefile", + "motion-correct-3dvolreg", + "motion-correct-mcflirt", + ) + ], outputs=_MOTION_PARAM_OUTPUTS, ) def func_motion_estimates( @@ -356,24 +354,11 @@ def func_motion_estimates( cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, - opt: Literal["3dvolreg", "mcflirt"], + opt: MotionCorrection, ) -> NODEBLOCK_RETURN: """Calculate motion estimates using 3dVolReg or MCFLIRT.""" - from CPAC.pipeline.utils import present_outputs - - wf, wf_outputs = motion_correct_connections(wf, cfg, strat_pool, pipe_num, opt) - return ( - wf, - present_outputs( - wf_outputs, - [ - "coordinate-transformation", - "filtered-coordinate-transformation", - "max-displacement", - "desc-movementParameters_motion", - "rels-displacement", - ], - ), + return motion_correct_connections( + wf, cfg, strat_pool, pipe_num, opt, estimate=True, correct=False ) @@ -490,286 +475,338 @@ def get_motion_ref_fmriprep( return wf, outputs -def motion_correct_3dvolreg(wf, cfg, strat_pool, pipe_num) -> NODEBLOCK_RETURN: +get_motion_refs = [get_motion_ref, get_motion_ref_fmriprep] + + +def motion_correct_3dvolreg( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + estimate: bool, + correct: bool, +) -> NODEBLOCK_RETURN: """Calculate motion parameters with 3dvolreg.""" - if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: - chunk_imports = ["import nibabel as nib"] - chunk = pe.Node( - Function( - input_names=["func_file", "n_chunks", "chunk_size"], - output_names=["TR_ranges"], - function=chunk_ts, - imports=chunk_imports, - ), - name=f"chunk_{pipe_num}", - ) + outputs: POOL_RESOURCE_DICT = {} + if strat_pool.check_rpool("motion-correct-3dvolreg"): + out_motion_A, _ = strat_pool.get_data("motion-correct-3dvolreg") + else: + if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: + chunk_imports = ["import nibabel as nib"] + chunk = pe.Node( + Function( + input_names=["func_file", "n_chunks", "chunk_size"], + output_names=["TR_ranges"], + function=chunk_ts, + imports=chunk_imports, + ), + name=f"chunk_{pipe_num}", + ) - # chunk.inputs.n_chunks = int(cfg.pipeline_setup['system_config'][ - # 'max_cores_per_participant']) + # chunk.inputs.n_chunks = int(cfg.pipeline_setup['system_config'][ + # 'max_cores_per_participant']) - # 10-TR sized chunks - chunk.inputs.chunk_size = 10 + # 10-TR sized chunks + chunk.inputs.chunk_size = 10 - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, chunk, "func_file") + node, out = strat_pool.get_data("desc-preproc_bold") + wf.connect(node, out, chunk, "func_file") - split_imports = ["import os", "import subprocess"] - split = pe.Node( - Function( - input_names=["func_file", "tr_ranges"], - output_names=["split_funcs"], - function=split_ts_chunks, - imports=split_imports, - ), - name=f"split_{pipe_num}", - ) + split_imports = ["import os", "import subprocess"] + split = pe.Node( + Function( + input_names=["func_file", "tr_ranges"], + output_names=["split_funcs"], + function=split_ts_chunks, + imports=split_imports, + ), + name=f"split_{pipe_num}", + ) - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, split, "func_file") - wf.connect(chunk, "TR_ranges", split, 
"tr_ranges") + node, out = strat_pool.get_data("desc-preproc_bold") + wf.connect(node, out, split, "func_file") + wf.connect(chunk, "TR_ranges", split, "tr_ranges") - out_split_func = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_split_func_{pipe_num}", - ) + out_split_func = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_split_func_{pipe_num}", + ) - wf.connect(split, "split_funcs", out_split_func, "out_file") + wf.connect(split, "split_funcs", out_split_func, "out_file") - func_motion_correct = pe.MapNode( - interface=preprocess.Volreg(), - name=f"func_generate_ref_{pipe_num}", - iterfield=["in_file"], - ) + func_motion_correct = pe.MapNode( + interface=preprocess.Volreg(), + name=f"func_generate_ref_{pipe_num}", + iterfield=["in_file"], + ) - wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") + wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") - func_concat = pe.Node( - interface=afni_utils.TCat(), name=f"func_concat_{pipe_num}" - ) - func_concat.inputs.outputtype = "NIFTI_GZ" + func_concat = pe.Node( + interface=afni_utils.TCat(), name=f"func_concat_{pipe_num}" + ) + func_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_motion_correct, "out_file", func_concat, "in_files") + wf.connect(func_motion_correct, "out_file", func_concat, "in_files") - out_motion = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_{pipe_num}", - ) + out_motion = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_motion_{pipe_num}", + ) - wf.connect(func_concat, "out_file", out_motion, "out_file") + wf.connect(func_concat, "out_file", out_motion, "out_file") - else: - out_split_func = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_split_func_{pipe_num}", - ) + else: + out_split_func = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_split_func_{pipe_num}", + ) - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, out_split_func, "out_file") + node, out = strat_pool.get_data("desc-preproc_bold") + wf.connect(node, out, out_split_func, "out_file") - func_motion_correct = pe.Node( - interface=preprocess.Volreg(), name=f"func_generate_ref_{pipe_num}" - ) + func_motion_correct = pe.Node( + interface=preprocess.Volreg(), name=f"func_generate_ref_{pipe_num}" + ) - wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") + wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") - out_motion = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_{pipe_num}", - ) + out_motion = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_motion_{pipe_num}", + ) - wf.connect(func_motion_correct, "out_file", out_motion, "out_file") + wf.connect(func_motion_correct, "out_file", out_motion, "out_file") - func_motion_correct.inputs.zpad = 4 - func_motion_correct.inputs.outputtype = "NIFTI_GZ" + func_motion_correct.inputs.zpad = 4 + func_motion_correct.inputs.outputtype = "NIFTI_GZ" - args = "-Fourier" - if cfg.functional_preproc["motion_estimates_and_correction"]["motion_correction"][ - "AFNI-3dvolreg" - ]["functional_volreg_twopass"]: - args = f"-twopass {args}" + args = "-Fourier" + if cfg.functional_preproc["motion_estimates_and_correction"][ + "motion_correction" + ]["AFNI-3dvolreg"]["functional_volreg_twopass"]: + args = f"-twopass {args}" - func_motion_correct.inputs.args = args + 
func_motion_correct.inputs.args = args - # Calculate motion parameters - func_motion_correct_A = func_motion_correct.clone( - f"func_motion_correct_3dvolreg_{pipe_num}" - ) - func_motion_correct_A.inputs.md1d_file = "max_displacement.1D" - func_motion_correct_A.inputs.args = args + # Calculate motion parameters + func_motion_correct_A = func_motion_correct.clone( + f"func_motion_correct_3dvolreg_{pipe_num}" + ) + func_motion_correct_A.inputs.md1d_file = "max_displacement.1D" + func_motion_correct_A.inputs.args = args - wf.connect(out_split_func, "out_file", func_motion_correct_A, "in_file") + wf.connect(out_split_func, "out_file", func_motion_correct_A, "in_file") - node, out = strat_pool.get_data("motion-basefile") - wf.connect(node, out, func_motion_correct_A, "basefile") + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, func_motion_correct_A, "basefile") - if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: - motion_concat = pe.Node( - interface=afni_utils.TCat(), name=f"motion_concat_{pipe_num}" - ) - motion_concat.inputs.outputtype = "NIFTI_GZ" + if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: + motion_concat = pe.Node( + interface=afni_utils.TCat(), name=f"motion_concat_{pipe_num}" + ) + motion_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_motion_correct_A, "out_file", motion_concat, "in_files") + wf.connect(func_motion_correct_A, "out_file", motion_concat, "in_files") - out_motion_A = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_A_{pipe_num}", - ) + out_motion_A = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_motion_A_{pipe_num}", + ) - wf.connect(motion_concat, "out_file", out_motion_A, "out_file") + wf.connect(motion_concat, "out_file", out_motion_A, "out_file") - concat_imports = ["import os"] - md1d_concat = pe.Node( - Function( - input_names=["in_files"], - output_names=["out_file"], - function=oned_text_concat, - imports=concat_imports, - ), - name=f"md1d_concat_{pipe_num}", - ) + concat_imports = ["import os"] + md1d_concat = pe.Node( + Function( + input_names=["in_files"], + output_names=["out_file"], + function=oned_text_concat, + imports=concat_imports, + ), + name=f"md1d_concat_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "md1d_file", md1d_concat, "in_files") + wf.connect(func_motion_correct_A, "md1d_file", md1d_concat, "in_files") - oned_concat = pe.Node( - Function( - input_names=["in_files"], - output_names=["out_file"], - function=oned_text_concat, - imports=concat_imports, - ), - name=f"oned_concat_{pipe_num}", - ) + oned_concat = pe.Node( + Function( + input_names=["in_files"], + output_names=["out_file"], + function=oned_text_concat, + imports=concat_imports, + ), + name=f"oned_concat_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "oned_file", oned_concat, "in_files") + wf.connect(func_motion_correct_A, "oned_file", oned_concat, "in_files") - oned_matrix_concat = pe.Node( - Function( - input_names=["in_files"], - output_names=["out_file"], - function=oned_text_concat, - imports=concat_imports, - ), - name=f"oned_matrix_concat_{pipe_num}", - ) + oned_matrix_concat = pe.Node( + Function( + input_names=["in_files"], + output_names=["out_file"], + function=oned_text_concat, + imports=concat_imports, + ), + name=f"oned_matrix_concat_{pipe_num}", + ) - wf.connect( - func_motion_correct_A, "oned_matrix_save", oned_matrix_concat, "in_files" - ) + wf.connect( + func_motion_correct_A, + 
"oned_matrix_save", + oned_matrix_concat, + "in_files", + ) - out_md1d = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_md1d_{pipe_num}", - ) + out_md1d = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_md1d_{pipe_num}", + ) - wf.connect(md1d_concat, "out_file", out_md1d, "out_file") + wf.connect(md1d_concat, "out_file", out_md1d, "out_file") - out_oned = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_{pipe_num}", - ) + out_oned = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_oned_{pipe_num}", + ) - wf.connect(oned_concat, "out_file", out_oned, "out_file") + wf.connect(oned_concat, "out_file", out_oned, "out_file") - out_oned_matrix = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_matrix_{pipe_num}", - ) + out_oned_matrix = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_oned_matrix_{pipe_num}", + ) - wf.connect(oned_matrix_concat, "out_file", out_oned_matrix, "out_file") + wf.connect(oned_matrix_concat, "out_file", out_oned_matrix, "out_file") - else: - out_motion_A = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_A_{pipe_num}", - ) + else: + out_motion_A = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_motion_A_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "out_file", out_motion_A, "out_file") + wf.connect(func_motion_correct_A, "out_file", out_motion_A, "out_file") - out_md1d = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_md1d_{pipe_num}", - ) + out_md1d = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_md1d_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "md1d_file", out_md1d, "out_file") + wf.connect(func_motion_correct_A, "md1d_file", out_md1d, "out_file") - out_oned = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_{pipe_num}", - ) + out_oned = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_oned_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "oned_file", out_oned, "out_file") + wf.connect(func_motion_correct_A, "oned_file", out_oned, "out_file") - out_oned_matrix = pe.Node( - interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_matrix_{pipe_num}", - ) + out_oned_matrix = pe.Node( + interface=util.IdentityInterface(fields=["out_file"]), + name=f"out_oned_matrix_{pipe_num}", + ) - wf.connect( - func_motion_correct_A, "oned_matrix_save", out_oned_matrix, "out_file" + wf.connect( + func_motion_correct_A, "oned_matrix_save", out_oned_matrix, "out_file" + ) + if estimate: + outputs.update( + { + "max-displacement": (out_md1d, "out_file"), + "desc-movementParameters_motion": (out_oned, "out_file"), + "coordinate-transformation": (out_oned_matrix, "out_file"), + "filtered-coordinate-transformation": (out_oned_matrix, "out_file"), + } + ) + if correct: + outputs.update( + { + "motion-correct-3dvolreg": (out_motion_A, ""), + "desc-preproc_bold": (out_motion_A, "out_file"), + "desc-motion_bold": (out_motion_A, "out_file"), + } ) - outputs = { - "desc-preproc_bold": (out_motion_A, "out_file"), - "desc-motion_bold": (out_motion_A, "out_file"), - "max-displacement": (out_md1d, "out_file"), - "desc-movementParameters_motion": (out_oned, "out_file"), - "coordinate-transformation": (out_oned_matrix, "out_file"), - "filtered-coordinate-transformation": 
(out_oned_matrix, "out_file"), - } - return wf, outputs -def motion_correct_mcflirt(wf, cfg, strat_pool, pipe_num) -> NODEBLOCK_RETURN: +def motion_correct_mcflirt( + wf: Workflow, + cfg: Configuration, + strat_pool: "ResourcePool", + pipe_num: int, + estimate: bool, + correct: bool, +) -> NODEBLOCK_RETURN: """Calculate motion parameters with MCFLIRT.""" - func_motion_correct_A = pe.Node( - interface=fsl.MCFLIRT(save_mats=True, save_plots=True), - name=f"func_motion_correct_mcflirt_{pipe_num}", - mem_gb=2.5, - ) - - func_motion_correct_A.inputs.save_mats = True - func_motion_correct_A.inputs.save_plots = True - func_motion_correct_A.inputs.save_rms = True - - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_motion_correct_A, "in_file") + outputs: POOL_RESOURCE_DICT = {} + if strat_pool.check_rpool("motion-correct-mcflirt"): + func_motion_correct_A, _ = strat_pool.get_data("motion-correct-mcflirt") + else: + func_motion_correct_A = pe.Node( + interface=fsl.MCFLIRT(save_mats=True, save_plots=True), + name=f"func_motion_correct_mcflirt_{pipe_num}", + mem_gb=2.5, + ) - node, out = strat_pool.get_data("motion-basefile") - wf.connect(node, out, func_motion_correct_A, "ref_file") + func_motion_correct_A.inputs.save_mats = True + func_motion_correct_A.inputs.save_plots = True + func_motion_correct_A.inputs.save_rms = True - normalize_motion_params = pe.Node( - Function( - input_names=["in_file"], - output_names=["out_file"], - function=normalize_motion_parameters, - ), - name=f"norm_motion_params_{pipe_num}", - ) + node, out = strat_pool.get_data("desc-preproc_bold") + wf.connect(node, out, func_motion_correct_A, "in_file") - wf.connect(func_motion_correct_A, "par_file", normalize_motion_params, "in_file") + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, func_motion_correct_A, "ref_file") - get_rms_abs = pe.Node( - Function( - input_names=["rms_files"], - output_names=["abs_file", "rels_file"], - function=get_mcflirt_rms_abs, - ), - name=f"get_mcflirt_rms_abs_{pipe_num}", - ) + normalize_motion_params = pe.Node( + Function( + input_names=["in_file"], + output_names=["out_file"], + function=normalize_motion_parameters, + ), + name=f"norm_motion_params_{pipe_num}", + ) - wf.connect(func_motion_correct_A, "rms_files", get_rms_abs, "rms_files") + wf.connect( + func_motion_correct_A, "par_file", normalize_motion_params, "in_file" + ) - outputs = { - "desc-preproc_bold": (func_motion_correct_A, "out_file"), - "desc-motion_bold": (func_motion_correct_A, "out_file"), - "max-displacement": (get_rms_abs, "abs_file"), - "rels-displacement": (get_rms_abs, "rels_file"), - "desc-movementParameters_motion": (normalize_motion_params, "out_file"), - "coordinate-transformation": (func_motion_correct_A, "mat_file"), - "filtered-coordinate-transformation": (func_motion_correct_A, "mat_file"), - } + get_rms_abs = pe.Node( + Function( + input_names=["rms_files"], + output_names=["abs_file", "rels_file"], + function=get_mcflirt_rms_abs, + ), + name=f"get_mcflirt_rms_abs_{pipe_num}", + ) + wf.connect(func_motion_correct_A, "rms_files", get_rms_abs, "rms_files") + + if estimate: + outputs.update( + { + "max-displacement": (get_rms_abs, "abs_file"), + "rels-displacement": (get_rms_abs, "rels_file"), + "desc-movementParameters_motion": ( + normalize_motion_params, + "out_file", + ), + "coordinate-transformation": (func_motion_correct_A, "mat_file"), + "filtered-coordinate-transformation": ( + func_motion_correct_A, + "mat_file", + ), + } + ) + if correct: + 
outputs.update( + { + "motion-correct-mcflirt": (func_motion_correct_A, ""), + "desc-preproc_bold": (func_motion_correct_A, "out_file"), + "desc-motion_bold": (func_motion_correct_A, "out_file"), + } + ) return wf, outputs @@ -784,10 +821,14 @@ def motion_correct_connections( cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, - opt: Literal["3dvolreg", "mcflirt"], + opt: MotionCorrection, + estimate: bool, + correct: bool, ) -> NODEBLOCK_RETURN: """Check opt for valid option, then connect that option.""" - motion_correct_options = valid_options["motion_correction"] + motion_correct_options = cast( + list[MotionCorrection], valid_options["motion_correction"] + ) if opt not in motion_correct_options: msg = ( "\n\n[!] Error: The 'tool' parameter of the " @@ -796,7 +837,7 @@ def motion_correct_connections( f".\n\nTool input: {opt}\n\n" ) raise KeyError(msg) - return motion_correct[opt](wf, cfg, strat_pool, pipe_num) + return motion_correct[opt](wf, cfg, strat_pool, pipe_num, estimate, correct) @nodeblock( @@ -816,6 +857,7 @@ def motion_correct_connections( "max-displacement", "rels-displacement", "coordinate-transformation", + "filtered-coordinate-transformation", "desc-movementParameters_motion", ), "TR", @@ -848,7 +890,7 @@ def motion_estimate_filter( cfg: Configuration, strat_pool: "ResourcePool", pipe_num: int, - opt: MotionEstimateFilter, + opt: "MotionEstimateFilter", ) -> NODEBLOCK_RETURN: """Filter motion parameters. @@ -957,3 +999,43 @@ def normalize_motion_parameters(in_file: str) -> str: np.savetxt(out_file, motion_params) return out_file + + +def stack_motion_blocks( + func_blocks: dict[str, list[NodeBlockFunction | list[NodeBlockFunction]]], + cfg: Configuration, + rpool: "ResourcePool", +) -> list[NodeBlockFunction | list[NodeBlockFunction]]: + """Create a stack of motion correction nodeblocks.""" + func_motion_blocks: list[NodeBlockFunction | list[NodeBlockFunction]] = [ + motion_estimate_filter + ] + correct_after: list[NodeBlockFunction | list[NodeBlockFunction]] = [] + calc_motion: list[NodeBlockFunction | list[NodeBlockFunction]] = [] + if not rpool.check_rpool("desc-movementParameters_motion"): + calc_motion = [calc_motion_stats] + if cfg["functional_preproc"]["motion_estimates_and_correction"][ + "motion_estimates" + ]["calculate_motion_first"]: + func_motion_blocks = [ + *get_motion_refs, + func_motion_estimates, + motion_estimate_filter, + ] + correct_after = [func_motion_correct] + else: + func_motion_blocks = [ + *get_motion_refs, + func_motion_estimates, + func_motion_correct, + motion_estimate_filter, + ] + return [ + *func_blocks["init"], + *func_motion_blocks, + *func_blocks["preproc"], + *correct_after, + *func_blocks["mask"], + *calc_motion, + *func_blocks["prep"], + ] diff --git a/CPAC/func_preproc/tests/test_preproc_connections.py b/CPAC/func_preproc/tests/test_preproc_connections.py index 03a5b51dd6..9489849edd 100644 --- a/CPAC/func_preproc/tests/test_preproc_connections.py +++ b/CPAC/func_preproc/tests/test_preproc_connections.py @@ -26,14 +26,7 @@ from nipype.interfaces.utility import Function as NipypeFunction from nipype.pipeline.engine import Workflow as NipypeWorkflow -from CPAC.func_preproc import get_motion_refs -from CPAC.func_preproc.func_motion import ( - calc_motion_stats, - func_motion_correct, - func_motion_correct_only, - func_motion_estimates, - motion_estimate_filter, -) +from CPAC.func_preproc import stack_motion_blocks from CPAC.func_preproc.func_preproc import func_normalize from CPAC.nuisance.nuisance import 
choose_nuisance_blocks from CPAC.pipeline.cpac_pipeline import connect_pipeline @@ -183,26 +176,16 @@ def test_motion_filter_connections( rpool = ResourcePool(cfg=c) for resource in pre_resources: if resource.endswith("xfm"): - rpool.set_data( - resource, - before_this_test, - resource, - {}, - "", - f"created_before_this_test_{regtool}", - ) + node_name = f"created_before_this_test_{regtool}" + elif resource == "desc-movementParameters_motion": + node_name = f"created_before_this_test_{motion_correction}" else: - rpool.set_data( - resource, before_this_test, resource, {}, "", "created_before_this_test" - ) + node_name = "created_before_this_test" + rpool.set_data(resource, before_this_test, resource, {}, "", node_name) # set up blocks pipeline_blocks = [] - func_init_blocks = [] - func_motion_blocks = [] - func_preproc_blocks = [] - func_mask_blocks = [] - func_prep_blocks = [ - calc_motion_stats, + func_blocks = {key: [] for key in ["init", "preproc", "mask"]} + func_blocks["prep"] = [ func_normalize, [ coregistration_prep_vol, @@ -210,57 +193,8 @@ def test_motion_filter_connections( coregistration_prep_fmriprep, ], ] - # Motion Correction - func_motion_blocks = [] - if c[ - "functional_preproc", - "motion_estimates_and_correction", - "motion_estimates", - "calculate_motion_first", - ]: - func_motion_blocks = [ - *get_motion_refs, - func_motion_estimates, - motion_estimate_filter, - ] - else: - func_motion_blocks = [ - *get_motion_refs, - func_motion_correct, - motion_estimate_filter, - ] - if not rpool.check_rpool("desc-movementParameters_motion"): - if c[ - "functional_preproc", - "motion_estimates_and_correction", - "motion_estimates", - "calculate_motion_first", - ]: - func_blocks = ( - func_init_blocks - + func_motion_blocks - + func_preproc_blocks - + [func_motion_correct_only] - + func_mask_blocks - + func_prep_blocks - ) - else: - func_blocks = ( - func_init_blocks - + func_preproc_blocks - + func_motion_blocks - + func_mask_blocks - + func_prep_blocks - ) - else: - func_blocks = ( - func_init_blocks - + func_preproc_blocks - + func_motion_blocks - + func_mask_blocks - + func_prep_blocks - ) - pipeline_blocks += func_blocks + + pipeline_blocks += stack_motion_blocks(func_blocks, c, rpool) # Nuisance Correction generate_only = ( True not in c["nuisance_corrections", "2-nuisance_regression", "run"] @@ -299,6 +233,7 @@ def test_motion_filter_connections( "motion_correction", "using", ] + and "desc-movementParameters_motion" not in pre_resources ): # Only for [On, Off] + mcflirt, we should have at least one of each assert { diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 08a59d4a90..4d07a192ee 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -25,6 +25,7 @@ import sys import time from time import strftime +from typing import TYPE_CHECKING import yaml import nipype @@ -83,12 +84,7 @@ distcor_phasediff_fsl_fugue, ) from CPAC.func_preproc import ( - calc_motion_stats, - func_motion_correct, - func_motion_correct_only, - func_motion_estimates, - get_motion_refs, - motion_estimate_filter, + stack_motion_blocks, ) from CPAC.func_preproc.func_preproc import ( bold_mask_afni, @@ -216,6 +212,9 @@ from CPAC.utils.workflow_serialization import cpac_flowdump_serializer from CPAC.vmhc.vmhc import smooth_func_vmhc, vmhc, warp_timeseries_to_sym_template +if TYPE_CHECKING: + from CPAC.pipeline.nodeblock import NodeBlockFunction + faulthandler.enable() # config.enable_debug_mode() @@ -1260,15 +1259,16 @@ def 
build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): # Functional Preprocessing, including motion correction and BOLD masking if cfg.functional_preproc["run"]: - func_init_blocks = [func_reorient, func_scaling, func_truncate] - func_preproc_blocks = [func_despike, func_slice_time] + func_blocks: dict[str, list[NodeBlockFunction | list[NodeBlockFunction]]] = {} + func_blocks["init"] = [func_reorient, func_scaling, func_truncate] + func_blocks["preproc"] = [func_despike, func_slice_time] if not rpool.check_rpool("desc-mean_bold"): - func_preproc_blocks.append(func_mean) + func_blocks["preproc"].append(func_mean) - func_mask_blocks = [] + func_blocks["mask"] = [] if not rpool.check_rpool("space-bold_desc-brain_mask"): - func_mask_blocks = [ + func_blocks["mask"] = [ [ bold_mask_afni, bold_mask_fsl, @@ -1281,8 +1281,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): bold_masking, ] - func_prep_blocks = [ - calc_motion_stats, + func_blocks["prep"] = [ func_normalize, [ coregistration_prep_vol, @@ -1310,49 +1309,9 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): if distcor_blocks: if len(distcor_blocks) > 1: distcor_blocks = [distcor_blocks] - func_prep_blocks += distcor_blocks - - func_motion_blocks = [] - if not rpool.check_rpool("desc-movementParameters_motion"): - if cfg["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_first"]: - func_motion_blocks = [ - *get_motion_refs, - func_motion_estimates, - motion_estimate_filter, - ] - func_blocks = ( - func_init_blocks - + func_motion_blocks - + func_preproc_blocks - + [func_motion_correct_only] - + func_mask_blocks - + func_prep_blocks - ) - else: - func_motion_blocks = [ - *get_motion_refs, - func_motion_correct, - motion_estimate_filter, - ] - func_blocks = ( - func_init_blocks - + func_preproc_blocks - + func_motion_blocks - + func_mask_blocks - + func_prep_blocks - ) - else: - func_blocks = ( - func_init_blocks - + func_preproc_blocks - + func_motion_blocks - + func_mask_blocks - + func_prep_blocks - ) + func_blocks["prep"] += distcor_blocks - pipeline_blocks += func_blocks + pipeline_blocks += stack_motion_blocks(func_blocks, cfg, rpool) # BOLD to T1 coregistration if cfg.registration_workflows["functional_registration"]["coregistration"][ diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index bce404246a..ac6e0cfe4a 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -512,6 +512,9 @@ def get_cpac_provenance(self, resource, strat=None): def motion_tool(self, resource: str, strat=None) -> Literal["3dvolreg", "mcflirt"]: """Check provenance for motion correction tool.""" + for tool in ["3dvolreg", "mcflirt"]: + if self.check_rpool(f"motion-correct-{tool}"): + return tool prov = self.get_cpac_provenance(resource, strat) last_entry = get_last_prov_entry(prov) last_node = last_entry.split(":")[1] diff --git a/CPAC/pipeline/nodeblock.py b/CPAC/pipeline/nodeblock.py index ff96314d2c..90541ad08d 100644 --- a/CPAC/pipeline/nodeblock.py +++ b/CPAC/pipeline/nodeblock.py @@ -20,8 +20,9 @@ from nipype.pipeline import engine as pe -POOL_RESOURCE: TypeAlias = Mapping[str, tuple[pe.Node | pe.Workflow, str]] -NODEBLOCK_RETURN: TypeAlias = tuple[pe.Workflow, POOL_RESOURCE] +POOL_RESOURCE_DICT: TypeAlias = dict[str, tuple[pe.Node | pe.Workflow, str]] +POOL_RESOURCE_MAPPING: TypeAlias = Mapping[str, tuple[pe.Node | pe.Workflow, str]] +NODEBLOCK_RETURN: TypeAlias = tuple[pe.Workflow, POOL_RESOURCE_MAPPING] class NodeBlockFunction: 
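# Pairing POOL_RESOURCE_DICT (a concrete dict) with the read-only
# POOL_RESOURCE_MAPPING inside NODEBLOCK_RETURN mirrors how the refactored
# motion functions accumulate their outputs. A minimal sketch of the pattern;
# example_block and its single resource are placeholders, not part of the patch:
from CPAC.pipeline.nodeblock import NODEBLOCK_RETURN, POOL_RESOURCE_DICT


def example_block(wf, cfg, strat_pool, pipe_num, opt) -> NODEBLOCK_RETURN:
    """Build outputs in a mutable dict; return them under the Mapping alias."""
    outputs: POOL_RESOURCE_DICT = {}
    node, out = strat_pool.get_data("desc-preproc_bold")
    outputs.update({"desc-preproc_bold": (node, out)})
    return wf, outputs  # a dict satisfies the Mapping-based return type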
diff --git a/CPAC/pipeline/schema.py index b06a597817..e72432c111 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -109,6 +109,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name bool1_1 = All(str_to_bool1_1, bool) forkable = All(Coerce(ListFromItem), [bool1_1], Length(max=2)) +MotionCorrection: TypeAlias = Literal["3dvolreg", "mcflirt"] valid_options = { "acpc": {"target": ["brain", "whole-head"]}, "brain_extraction": { diff --git a/CPAC/pipeline/utils.py index 0c4605e460..8d46fe1373 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -25,7 +25,7 @@ from CPAC.utils.bids_utils import insert_entity if TYPE_CHECKING: - from CPAC.pipeline.nodeblock import POOL_RESOURCE + from CPAC.pipeline.nodeblock import POOL_RESOURCE_MAPPING MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs @@ -111,7 +111,9 @@ def name_fork(resource_idx, cfg, json_info, out_dct): return resource_idx, out_dct -def present_outputs(outputs: "POOL_RESOURCE", keys: list[str]) -> "POOL_RESOURCE": +def present_outputs( + outputs: "POOL_RESOURCE_MAPPING", keys: list[str] +) -> "POOL_RESOURCE_MAPPING": """ Return the subset of ``outputs`` including only that are present in ``keys``. diff --git a/CPAC/registration/registration.py index 2d853615e7..0999ba053c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1458,7 +1458,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector( wf_name: str, - cfg: Configuration, + cfg: "Configuration", orig: str = "T1w", opt: Literal["FSL", "FSL-linear"] = "FSL", symmetric: bool = False, From 04f0d5257e107bf0db838b1c08173fad098c0af5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 27 May 2025 12:55:13 -0400 Subject: [PATCH 309/507] adding to changelog --- CHANGELOG.md | 1 + CPAC/nuisance/bandpass.py | 46 +++++++++++++++++++++++++-------------- 2 files changed, 31 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 078114399e..78b3953693 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,6 +56,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed. - Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). - A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off. +- A bug in `ideal_bandpass` where the frequency mask was incorrectly applied, which caused the filter to fail in certain cases. ### Removed diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py index 451d4a5b9e..a31acee8e5 100644 --- a/CPAC/nuisance/bandpass.py +++ b/CPAC/nuisance/bandpass.py @@ -8,41 +8,55 @@ def ideal_bandpass(data, sample_period, bandpass_freqs): - # Derived from YAN Chao-Gan 120504 based on REST. + """ + Apply ideal bandpass filtering to 1D time series data using FFT. Derived from YAN Chao-Gan 120504 based on REST. + + Parameters + ---------- + data : NDArray + 1D time series data to be filtered.
+ sample_period : float + Length of sampling period in seconds. + bandpass_freqs : tuple + Tuple containing the bandpass frequencies (LowCutoff, HighCutoff). + + Returns + ------- + NDArray + Filtered time series data. + + """ sample_freq = 1.0 / sample_period sample_length = data.shape[0] + nyquist_freq = sample_freq / 2.0 - data_p = np.zeros(int(2 ** np.ceil(np.log2(sample_length)))) + # Length of zero-padded data for efficient FFT + N = int(2 ** np.ceil(np.log2(len(data)))) + data_p = np.zeros(N) data_p[:sample_length] = data LowCutoff, HighCutoff = bandpass_freqs if LowCutoff is None: # No lower cutoff (low-pass filter) low_cutoff_i = 0 - elif LowCutoff > sample_freq / 2.0: + elif LowCutoff > nyquist_freq: # Cutoff beyond fs/2 (all-stop filter) - low_cutoff_i = int(data_p.shape[0] / 2) + low_cutoff_i = int(N / 2) else: - low_cutoff_i = np.ceil(LowCutoff * data_p.shape[0] * sample_period).astype( - "int" - ) + low_cutoff_i = np.ceil(LowCutoff * N * sample_period).astype("int") - if HighCutoff > sample_freq / 2.0 or HighCutoff is None: + if HighCutoff is None or HighCutoff > nyquist_freq: # Cutoff beyond fs/2 or unspecified (become a highpass filter) - high_cutoff_i = int(data_p.shape[0] / 2) + high_cutoff_i = int(N / 2) else: - high_cutoff_i = np.fix(HighCutoff * data_p.shape[0] * sample_period).astype( - "int" - ) + high_cutoff_i = np.fix(HighCutoff * N * sample_period).astype("int") freq_mask = np.zeros_like(data_p, dtype="bool") freq_mask[low_cutoff_i : high_cutoff_i + 1] = True - freq_mask[data_p.shape[0] - high_cutoff_i : data_p.shape[0] + 1 - low_cutoff_i] = ( - True - ) + freq_mask[N - high_cutoff_i : N + 1 - low_cutoff_i] = True f_data = fft(data_p) - f_data[freq_mask is not True] = 0.0 + f_data[~freq_mask] = 0.0 return np.real_if_close(ifft(f_data)[:sample_length]) From 6141a7b10e568425fe50d2b4c26989de8487d0f4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 27 May 2025 13:01:42 -0400 Subject: [PATCH 310/507] :bug: Fix QC movement parameters connection --- CPAC/qc/xcp.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index 4a313b4719..50c9102365 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -152,6 +152,9 @@ def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): ), name=f"cal_DVARS_strip_{pipe_num}", ) + motion_name = "desc-movementParametersUnfiltered_motion" + if motion_name not in nodes: + motion_name = "desc-movementParameters_motion" wf.connect( [ ( @@ -166,10 +169,15 @@ def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): ), (cal_DVARS, cal_DVARS_strip, [("out_file", "file_1D")]), (cal_DVARS_strip, qc_file, [("out_file", "dvars_after")]), + ( + nodes[motion_name].node, + qc_file, + [(nodes[motion_name].out, "movement_parameters")], + ), *[ (nodes[node].node, qc_file, [(nodes[node].out, node.replace("-", "_"))]) for node in motion_params - if node in nodes + if not node.endswith("_motion") and node in nodes ], ] ) From 4c6accf08af0a7f94a19228ebb092f03657bb705 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 27 May 2025 13:19:40 -0400 Subject: [PATCH 311/507] :bug: Don't convert non-datetimes to datetime --- CPAC/utils/monitoring/draw_gantt_chart.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index 25aaf15b61..a7a0aaac96 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -635,7 +635,7 @@ def _timing(nodes_list): for node
in nodes_list if "start" in node and "finish" in node ] - except ValueError: + except (TypeError, ValueError): # Drop any problematic nodes new_node_list = [] for node in nodes_list: @@ -669,5 +669,7 @@ def _timing_timestamp(node): ) if (k in {"start", "finish"} and isinstance(v, str)) else DatetimeWithSafeNone(v) + if k in {"start", "finish"} + else v for k, v in node.items() } From e38d59caf71ed2a6bf356521006fb2b85ed8db0a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 27 May 2025 13:31:35 -0400 Subject: [PATCH 312/507] adding a unit-test for ideal_bandpass --- CPAC/nuisance/bandpass.py | 4 ++++ CPAC/nuisance/tests/test_bandpass.py | 26 ++++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py index a31acee8e5..d56e42c3ef 100644 --- a/CPAC/nuisance/bandpass.py +++ b/CPAC/nuisance/bandpass.py @@ -6,6 +6,8 @@ import nibabel as nib from scipy.fftpack import fft, ifft +from CPAC.utils.monitoring import IFLOGGER + def ideal_bandpass(data, sample_period, bandpass_freqs): """ @@ -106,6 +108,8 @@ def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_perio sample_period = float(hdr.get_zooms()[3]) # Sketchy check to convert TRs in millisecond units if sample_period > 20.0: + message = f"Sample period ({sample_period}) is very large. Assuming milliseconds and converting to seconds." + IFLOGGER.warning(message) sample_period /= 1000.0 Y_bp = np.zeros_like(Y) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 452b55d3c7..137fca7426 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -22,8 +22,11 @@ from numpy.typing import NDArray import pytest +import numpy as np +import scipy.fft import fft from CPAC.nuisance.bandpass import read_1D +from CPAC.nuisance.bandpass import ideal_bandpass RAW_ONE_D: Traversable = files("CPAC").joinpath("nuisance/tests/regressors.1D") @@ -46,3 +49,26 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert data.shape == (10, 29) # all header lines should be captured assert len(header) == 5 - start_line + + +@pytest.mark.parametrize("lowcut, highcut, in_freq, out_freq", [ + (0.005, 0.05, 0.01, 0.2), + (0.01, 0.1, 0.02, 0.15), + (0.02, 0.08, 0.04, 0.12), +]) +def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq): + sample_period = 1.0 + t = np.arange(512) * sample_period + signal = np.sin(2 * np.pi * in_freq * t) + np.sin(2 * np.pi * out_freq * t) + + filtered = ideal_bandpass(signal, sample_period, (lowcut, highcut)) + + freqs = np.fft.fftfreq(len(signal), d=sample_period) + orig_fft = np.abs(fft(signal)) + filt_fft = np.abs(fft(filtered)) + + idx_in = np.argmin(np.abs(freqs - in_freq)) + idx_out = np.argmin(np.abs(freqs - out_freq)) + + assert filt_fft[idx_in] > 0.5 * orig_fft[idx_in] + assert filt_fft[idx_out] < 0.1 * orig_fft[idx_out] \ No newline at end of file From 4b0bf06450d6f05b663f319f367ce1555461b532 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 27 May 2025 13:35:42 -0400 Subject: [PATCH 313/507] pre-commit run --- CPAC/nuisance/tests/test_bandpass.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 137fca7426..dfac85c217 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -20,13 +20,12 @@ from 
importlib.resources import files from pathlib import Path +import numpy as np from numpy.typing import NDArray import pytest -import numpy as np -import scipy.fft import fft +from scipy.fft import fft -from CPAC.nuisance.bandpass import read_1D -from CPAC.nuisance.bandpass import ideal_bandpass +from CPAC.nuisance.bandpass import ideal_bandpass, read_1D RAW_ONE_D: Traversable = files("CPAC").joinpath("nuisance/tests/regressors.1D") @@ -51,12 +50,16 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert len(header) == 5 - start_line -@pytest.mark.parametrize("lowcut, highcut, in_freq, out_freq", [ - (0.005, 0.05, 0.01, 0.2), - (0.01, 0.1, 0.02, 0.15), - (0.02, 0.08, 0.04, 0.12), -]) +@pytest.mark.parametrize( + "lowcut, highcut, in_freq, out_freq", + [ + (0.005, 0.05, 0.01, 0.2), + (0.01, 0.1, 0.02, 0.15), + (0.02, 0.08, 0.04, 0.12), + ], +) def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq): + """Test the ideal bandpass filter with various cutoff frequencies.""" sample_period = 1.0 t = np.arange(512) * sample_period signal = np.sin(2 * np.pi * in_freq * t) + np.sin(2 * np.pi * out_freq * t) @@ -71,4 +74,4 @@ def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq) idx_out = np.argmin(np.abs(freqs - out_freq)) assert filt_fft[idx_in] > 0.5 * orig_fft[idx_in] - assert filt_fft[idx_out] < 0.1 * orig_fft[idx_out] \ No newline at end of file + assert filt_fft[idx_out] < 0.1 * orig_fft[idx_out] From c7ccec920d73610cbd0c617bb47228bdbf927c51 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 27 May 2025 13:56:18 -0400 Subject: [PATCH 314/507] :bug: Fix dvars_after connection --- CPAC/qc/xcp.py | 93 +++++++++++++++++++++++++++----------------------- 1 file changed, 51 insertions(+), 42 deletions(-) diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index 50c9102365..1a316717cd 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -146,7 +146,7 @@ def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): cal_DVARS_strip = pe.Node( Function( input_names=["file_1D"], - output_names=["out_file"], + output_names=["out_file", "out_matrix"], function=DVARS_strip_t0, as_module=True, ), @@ -168,7 +168,11 @@ def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): [(nodes["space-bold_desc-brain_mask"].out, "mask")], ), (cal_DVARS, cal_DVARS_strip, [("out_file", "file_1D")]), - (cal_DVARS_strip, qc_file, [("out_file", "dvars_after")]), + ( + cal_DVARS_strip, + qc_file, + [("out_file", "dvars_after_path"), ("out_matrix", "dvars_after")], + ), ( nodes[motion_name].node, qc_file, @@ -200,83 +204,87 @@ def dvcorr(dvars, fdj): # This function is for a function node for which # Nipype will connect many other nodes as inputs def generate_xcp_qc( # noqa: PLR0913 - sub, - ses, - task, - run, - desc, - regressors, - bold2t1w_mask, - t1w_mask, - bold2template_mask, - template_mask, - original_func, - final_func, - movement_parameters, - dvars, - censor_indices, - framewise_displacement_jenkinson, - dvars_after, - template, -): + sub: str, + ses: str, + task: str, + run: str | int, + desc: str, + regressors: str, + bold2t1w_mask: str, + t1w_mask: str, + bold2template_mask: str, + template_mask: str, + original_func: str, + final_func: str, + movement_parameters: str, + dvars: str, + censor_indices: list[int], + framewise_displacement_jenkinson: str, + dvars_after: np.ndarray, + dvars_after_path: str, + template: str, +) -> str: """ Generate an RBC-style QC CSV. 
Parameters ---------- - sub : str + sub subject ID - ses : str + ses session ID - task : str + task task ID - run : str or int + run run ID - desc : str + desc description string - regressors : str + regressors 'Name' of regressors in fork - original_func : str + original_func path to original 'bold' image - final_bold : str + final_func path to 'space-template_desc-preproc_bold' image - bold2t1w_mask : str + bold2t1w_mask path to bold-to-T1w transform applied to space-bold_desc-brain_mask with space-T1w_desc-brain_mask reference - t1w_mask : str + t1w_mask path to space-T1w_desc-brain_mask - bold2template_mask : str + bold2template_mask path to space-template_desc-bold_mask - template_mask : str + template_mask path to T1w-brain-template-mask or EPI-template-mask - movement_parameters: str + movement_parameters path to movement parameters - dvars : str + dvars path to DVARS before motion correction - censor_indices : list + censor_indices list of indices of censored volumes - framewise_displacement_jenkinson : str + framewise_displacement_jenkinson path to framewise displacement (Jenkinson) before motion correction - dvars_after : str - path to DVARS on final 'bold' image + dvars_after + DVARS matrix for final 'bold' image + + dvars_after_path + path to DVARS matrix for final 'bold' image - template : str + template path to registration template Returns @@ -343,10 +351,10 @@ def generate_xcp_qc( # noqa: PLR0913 meanDV["motionDVCorrInit"] = dvcorr(dvars, framewise_displacement_jenkinson) except ValueError as value_error: meanDV["motionDVCorrInit"] = f"ValueError({value_error!s})" - meanDV["meanDVFinal"] = np.mean(np.loadtxt(dvars_after)) + meanDV["meanDVFinal"] = np.mean(dvars_after) try: meanDV["motionDVCorrFinal"] = dvcorr( - dvars_after, framewise_displacement_jenkinson + dvars_after_path, framewise_displacement_jenkinson ) except ValueError as value_error: meanDV["motionDVCorrFinal"] = f"ValueError({value_error!s})" @@ -515,6 +523,7 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): "censor_indices", "regressors", "framewise_displacement_jenkinson", + "dvars_after_path", "dvars_after", ], output_names=["qc_file"], From 62621e59fa6cfacc86dd71c51b54cbde66df45bc Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 4 Jun 2025 13:28:45 -0400 Subject: [PATCH 315/507] :wastebasket: Deprecate "motion_estimates" config option --- CHANGELOG.md | 1 + CPAC/func_preproc/func_motion.py | 40 +++------ .../tests/test_preproc_connections.py | 4 - CPAC/pipeline/schema.py | 90 ++++++++++++++++--- .../configs/pipeline_config_abcd-options.yml | 8 -- .../configs/pipeline_config_abcd-prep.yml | 6 -- .../configs/pipeline_config_blank.yml | 8 -- .../configs/pipeline_config_default.yml | 8 -- .../pipeline_config_fmriprep-options.yml | 5 -- .../configs/pipeline_config_regtest-3.yml | 5 -- .../configs/pipeline_config_regtest-4.yml | 5 -- CPAC/utils/utils.py | 10 --- 12 files changed, 91 insertions(+), 99 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 69e8a2543d..c5833ccfe4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -72,6 +72,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `input` field from `coregistration` in blank and default config. - `reg_with_skull` swtich from `func_input_prep` in blank and default config. - Support for AFNI 3dECM < v21.1.1. +- `calculate_motion_first` and `calculate_motion_after` configuration options.
#### Removed CI dependency diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 31c1ff28dd..abea6691ca 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -322,12 +322,10 @@ def func_motion_correct( opt: MotionCorrection, ) -> NODEBLOCK_RETURN: """Perform motion estimation and correction using 3dVolReg or MCFLIRT.""" - wf, outputs = motion_correct_connections( + return motion_correct_connections( wf, cfg, strat_pool, pipe_num, opt, estimate=False, correct=True ) - return wf, outputs - @nodeblock( name="motion_estimates", @@ -1007,35 +1005,21 @@ def stack_motion_blocks( rpool: "ResourcePool", ) -> list[NodeBlockFunction | list[NodeBlockFunction]]: """Create a stack of motion correction nodeblocks.""" - func_motion_blocks: list[NodeBlockFunction | list[NodeBlockFunction]] = [ - motion_estimate_filter - ] - correct_after: list[NodeBlockFunction | list[NodeBlockFunction]] = [] - calc_motion: list[NodeBlockFunction | list[NodeBlockFunction]] = [] - if not rpool.check_rpool("desc-movementParameters_motion"): - calc_motion = [calc_motion_stats] - if cfg["functional_preproc"]["motion_estimates_and_correction"][ - "motion_estimates" - ]["calculate_motion_first"]: - func_motion_blocks = [ - *get_motion_refs, - func_motion_estimates, - motion_estimate_filter, - ] - correct_after = [func_motion_correct] - else: - func_motion_blocks = [ - *get_motion_refs, - func_motion_estimates, - func_motion_correct, - motion_estimate_filter, - ] + func_motion_blocks: list[NodeBlockFunction | list[NodeBlockFunction]] = ( + [motion_estimate_filter] + if rpool.check_rpool("desc-movementParameters_motion") + else [ + *get_motion_refs, + func_motion_estimates, + motion_estimate_filter, + calc_motion_stats, + func_motion_correct, + ] + ) return [ *func_blocks["init"], *func_motion_blocks, *func_blocks["preproc"], - *correct_after, *func_blocks["mask"], - *calc_motion, *func_blocks["prep"], ] diff --git a/CPAC/func_preproc/tests/test_preproc_connections.py b/CPAC/func_preproc/tests/test_preproc_connections.py index 9489849edd..6e316791f7 100644 --- a/CPAC/func_preproc/tests/test_preproc_connections.py +++ b/CPAC/func_preproc/tests/test_preproc_connections.py @@ -135,10 +135,6 @@ def test_motion_filter_connections( "functional_preproc": { "motion_estimates_and_correction": { "motion_correction": {"using": motion_correction}, - "motion_estimates": { - "calculate_motion_after": not calculate_motion_first, - "calculate_motion_first": calculate_motion_first, - }, "motion_estimate_filter": {"run": run, "filters": filters}, "run": True, }, diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index e72432c111..729804a03a 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -18,10 +18,18 @@ """Validation schema for C-PAC pipeline configurations.""" # pylint: disable=too-many-lines +from dataclasses import dataclass from itertools import chain, permutations import re from subprocess import CalledProcessError -from typing import Literal, Optional as OptionalType, TypeAlias, TypedDict +from typing import ( + Any as AnyType, + Literal, + Optional as OptionalType, + TypeAlias, + TypedDict, +) +import warnings import numpy as np from pathvalidate import sanitize_filename @@ -45,11 +53,14 @@ Range, Required, Schema, + Schemable, Title, + UNDEFINED, ) from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX +from CPAC.utils.monitoring import UTLOGGER from CPAC.utils.utils import YAML_BOOLS # 1 or 
more digits, optional decimal, 'e', optional '-', 1 or more digits @@ -70,6 +81,65 @@ ORGANISMS: list[Organism] = ["human", "non-human primate", "rodent"] +def deprecated_option(option: Schemable, version: str, message: str) -> None: + """Mark an option as deprecated. + + Parameters + ---------- + option + The deprecated option. + version + The version in which the option was deprecated. + message + A message explaining the deprecation. + """ + UTLOGGER.warning( + f"Option '{option}' is deprecated as of version {version}: {message}" + ) + warnings.warn( + f"Option '{option}' is deprecated as of version {version}: {message}", + DeprecationWarning, + stacklevel=2, + ) + + +@dataclass +class DeprecatedOption: + """A version and message for a deprecated option.""" + + version: str + message: str + + +class Deprecated(Optional): + """Mark an option as deprecated. + + This class is used to mark options that are deprecated in the schema. + It inherits from `Optional` to allow the option to be omitted. + """ + + def __init__( + self, + schema: Schemable, + version: str, + msg: str = "This option is deprecated and will be removed in a future release.", + default: AnyType = UNDEFINED, + description: AnyType | None = None, + ) -> None: + """Initialize the Deprecated option.""" + super().__init__(schema, msg, default, description) + setattr(self, "deprecated", DeprecatedOption(version, msg)) + + def __call__(self, v: AnyType) -> AnyType: + """Call the Deprecated option.""" + if v is not None: + info = getattr(self, "deprecated", None) + if info: + deprecated_option(self._schema, info.version, info.message) + return super().__call__(v) + return v + + def str_to_bool1_1(x): # pylint: disable=invalid-name """Convert strings to Booleans for YAML1.1 syntax. @@ -911,7 +981,11 @@ def sanitize(filename): }, "motion_estimates_and_correction": { "run": bool1_1, - "motion_estimates": { + Deprecated( + "motion_estimates", + version="v1.8.8", + msg="The option to choose whether to calculate motion estimates before or after slice-timing correction was removed in v1.8.8 and will have no effect. This configuration option will be removed in a future release.", + ): { "calculate_motion_first": bool1_1, "calculate_motion_after": bool1_1, }, @@ -1291,20 +1365,12 @@ def sanitize(filename): ) -def schema(config_dict): +def schema(config_dict: dict) -> dict: """Validate a participant-analysis pipeline configuration. Validate against the latest validation schema by first applying backwards- compatibility patches, then applying Voluptuous validation, then handling complex configuration interaction checks before returning validated config_dict. 
- - Parameters - ---------- - config_dict : dict - - Returns - ------- - dict """ from CPAC.utils.utils import _changes_1_8_0_to_1_8_1 @@ -1475,4 +1541,4 @@ def schema(config_dict): return partially_validated -schema.schema = latest_schema.schema +schema.schema = latest_schema.schema # type: ignore[reportFunctionMemberAccess] diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 11891240a5..ae940c2812 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -262,14 +262,6 @@ functional_preproc: run: On motion_estimates_and_correction: run: On - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: On - - # calculate motion statistics AFTER motion correction - calculate_motion_after: Off - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index c8882984f8..1c76888075 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -215,12 +215,6 @@ registration_workflows: interpolation: Linear functional_preproc: - motion_estimates_and_correction: - motion_estimates: - - # calculate motion statistics AFTER motion correction - calculate_motion_after: Off - distortion_correction: # using: ['PhaseDiff', 'Blip', 'Blip-FSL-TOPUP'] diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 7a792ca710..90434d9404 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -972,14 +972,6 @@ functional_preproc: motion_estimates_and_correction: run: Off - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: Off - - # calculate motion statistics AFTER motion correction - calculate_motion_after: On - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index ccb074cfc0..d1ae1c7cfb 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1072,14 +1072,6 @@ functional_preproc: run: On - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: Off - - # calculate motion statistics AFTER motion correction - calculate_motion_after: On - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index c156cfc878..cee586e561 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -345,11 +345,6 @@ functional_preproc: motion_estimates_and_correction: run: On - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: On - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index 1c97b3416b..ae38bc7965 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -164,11 
+164,6 @@ functional_preproc: motion_estimates_and_correction: run: On - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: On - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 5bc94822be..8d1f955c16 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -192,11 +192,6 @@ functional_preproc: motion_estimates_and_correction: run: On - motion_estimates: - - # calculate motion statistics BEFORE slice-timing correction - calculate_motion_first: On - motion_correction: # using: ['3dvolreg', 'mcflirt'] diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index ea7b1c2c5c..c767de4e8a 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -1594,16 +1594,6 @@ def _changes_1_8_0_to_1_8_1(config_dict: dict) -> dict: del config_dict["functional_preproc"]["motion_estimates_and_correction"][ "calculate_motion_first" ] - config_dict = set_nested_value( - config_dict, - [ - "functional_preproc", - "motion_estimates_and_correction", - "motion_estimates", - "calculate_motion_first", - ], - calculate_motion_first, - ) return config_dict From f711ef8c6b860c854094208afaf5f97650979c89 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 4 Jun 2025 14:51:58 -0400 Subject: [PATCH 316/507] :pencil2: Fix schema imports --- CPAC/pipeline/schema.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 729804a03a..ac9969f4b2 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -53,10 +53,9 @@ Range, Required, Schema, - Schemable, Title, - UNDEFINED, ) +from voluptuous.schema_builder import Schemable, UNDEFINED from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX From c93f39304d8d23775c1009573a3f9ce6fa44cec5 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 4 Jun 2025 15:02:05 -0400 Subject: [PATCH 317/507] :arrow_up: Upgrade `voluptuous`@0.15.2 [rebuild lite] [rebuild standard] --- CHANGELOG.md | 4 ++++ pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c5833ccfe4..1d0ac7f455 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). - A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off. +### Upgraded dependencies + +- `voluptuous` 0.13.1 → 0.15.2 + ### Removed - Variant image recipes. diff --git a/pyproject.toml b/pyproject.toml index 22dec38dbd..84ffa2ad8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
[build-system] -requires = ["nipype==1.8.6", "numpy==1.25.1", "pyyaml==6.0", "setuptools<60.0", "voluptuous==0.13.1"] +requires = ["nipype==1.8.6", "numpy==1.25.1", "pyyaml==6.0", "setuptools<60.0", "voluptuous==0.15.2"] build-backend = "setuptools.build_meta" [tool.coverage.paths] diff --git a/requirements.txt b/requirements.txt index bb9f9b6c73..be44a513da 100644 --- a/requirements.txt +++ b/requirements.txt @@ -36,7 +36,7 @@ scipy==1.11.1 sdcflows==2.4.0 semver==3.0.1 traits==6.3.2 -voluptuous==0.13.1 +voluptuous==0.15.2 # the below are pinned specifically to match what the FSL installer installs botocore==1.31.4 charset-normalizer==3.1.0 From 887b887e58e23f28f7b444acee1a0174bbb5bcee Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 4 Jun 2025 22:45:05 -0400 Subject: [PATCH 318/507] :white_check_mark: Test `Deprecated` schema option --- CPAC/pipeline/test/test_schema_validation.py | 33 +++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 0b5e20da3f..2d299eaa65 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -1,6 +1,7 @@ """Tests for schema.py.""" from itertools import combinations +import warnings import pytest from voluptuous.error import ExclusiveInvalid, Invalid @@ -12,7 +13,9 @@ "run_value", [True, False, [True], [False], [True, False], [False, True]] ) def test_motion_estimates_and_correction(run_value): - """Test that any truthy forkable option for 'run' throws the custom + """Test for human-readable exception for invalid motion_estimate_filter. + + Test that any truthy forkable option for 'run' throws the custom human-readable exception for an invalid motion_estimate_filter. 
""" # pylint: disable=invalid-name @@ -143,3 +146,31 @@ def test_overwrite_transform(registration_using): with pytest.raises(ExclusiveInvalid) as e: Configuration(d) assert "Overwrite transform method is the same" in str(e.value) + + +@pytest.mark.parametrize( + "configuration", + [ + {}, + { + "functional_preproc": { + "motion_estimates_and_correction": { + "motion_estimates": { + "calculate_motion_first": False, + "calculate_motion_after": False, + } + } + } + }, + ], +) +def test_deprecation(configuration: dict) -> None: + """Test that deprecated options warn and non-deprecated options do not.""" + if configuration: + with pytest.warns(DeprecationWarning) as record: + Configuration(configuration) + assert any("motion_estimates" in str(w.message) for w in record) + else: + with warnings.catch_warnings(): + warnings.simplefilter("error") + Configuration(configuration) From 1c7eddca8955351e691b6093e5fd23de2270cca0 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 4 Jun 2025 22:48:18 -0400 Subject: [PATCH 319/507] :bug: Motion after mask --- CPAC/func_preproc/func_motion.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index abea6691ca..f4e3dd6003 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1018,8 +1018,8 @@ def stack_motion_blocks( ) return [ *func_blocks["init"], - *func_motion_blocks, *func_blocks["preproc"], *func_blocks["mask"], + *func_motion_blocks, *func_blocks["prep"], ] From 5f7c37a9898f1ab0db49ce4d5e1ca1f8ed9d2c65 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 5 Jun 2025 12:18:33 -0400 Subject: [PATCH 320/507] :bug: Remove extra inputs --- CPAC/func_preproc/func_motion.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index f4e3dd6003..20b09f07cf 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -306,7 +306,6 @@ def estimate_reference_image(in_file: str) -> str: "motion-correct-mcflirt", "desc-preproc_bold", "motion-basefile", - *_MOTION_PARAM_OUTPUTS, ) ], outputs={ From eaffeb7ee034100f8a2d37855ccc57155f0bf927 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 6 Jun 2025 10:18:17 -0400 Subject: [PATCH 321/507] :necktie: Resequence motion --- CPAC/func_preproc/func_motion.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 20b09f07cf..6a54fb7eb8 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -342,6 +342,7 @@ def func_motion_correct( "motion-basefile", "motion-correct-3dvolreg", "motion-correct-mcflirt", + "space-bold_desc-brain_mask", ) ], outputs=_MOTION_PARAM_OUTPUTS, @@ -1011,14 +1012,14 @@ def stack_motion_blocks( *get_motion_refs, func_motion_estimates, motion_estimate_filter, - calc_motion_stats, - func_motion_correct, ] ) return [ *func_blocks["init"], *func_blocks["preproc"], - *func_blocks["mask"], *func_motion_blocks, + func_motion_correct, + *func_blocks["mask"], + calc_motion_stats, *func_blocks["prep"], ] From ca554ba213c6d214f7b694663ae8099379cba346 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 6 Jun 2025 13:03:45 -0400 Subject: [PATCH 322/507] :white_check_mark: Update `test_NodeBlock_option_SSOT` --- CPAC/func_preproc/func_motion.py | 2 +- CPAC/utils/tests/test_utils.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py 
b/CPAC/func_preproc/func_motion.py index 6a54fb7eb8..a5aba82f06 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -392,7 +392,6 @@ def get_motion_ref( opt: Literal["mean", "median", "selected_volume"], ) -> NODEBLOCK_RETURN: """Get the reference image for motion correction.""" - node, out = strat_pool.get_data("desc-preproc_bold") in_label = "in_file" match opt: case "mean": @@ -428,6 +427,7 @@ def get_motion_ref( ) ) raise ValueError(msg) + node, out = strat_pool.get_data("desc-preproc_bold") func_get_RPI.inputs.outputtype = "NIFTI_GZ" wf.connect(node, out, func_get_RPI, in_label) outputs = {"motion-basefile": (func_get_RPI, "out_file")} diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 74e4a03993..5cc5e617c1 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -23,7 +23,7 @@ from _pytest.logging import LogCaptureFixture import pytest -from CPAC.func_preproc import get_motion_refs +from CPAC.func_preproc.func_motion import get_motion_ref from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.utils.configuration import Configuration from CPAC.utils.monitoring.custom_logging import log_subprocess @@ -169,13 +169,13 @@ def test_executable(executable): _installation_check(executable, "-help") -@pytest.mark.parametrize("get_motion_ref", get_motion_refs) -def test_NodeBlock_option_SSOT(get_motion_ref: NodeBlockFunction): # pylint: disable=invalid-name +def test_NodeBlock_option_SSOT(): # pylint: disable=invalid-name """Test using NodeBlock dictionaries for SSOT for options.""" assert isinstance(get_motion_ref, NodeBlockFunction) with pytest.raises(ValueError) as value_error: get_motion_ref(None, None, None, None, opt="chaos") error_message = str(value_error.value).rstrip() + assert get_motion_ref.option_val for opt in get_motion_ref.option_val: assert f"'{opt}'" in error_message assert error_message.endswith("Tool input: 'chaos'") From 8447e3169c91a73170d5924b2eb5f3b210d8c6b5 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 6 Jun 2025 14:06:56 -0400 Subject: [PATCH 323/507] fixup! 
:necktie: Resequence motion --- CPAC/func_preproc/func_motion.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index a5aba82f06..cdb789441e 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1005,19 +1005,17 @@ def stack_motion_blocks( rpool: "ResourcePool", ) -> list[NodeBlockFunction | list[NodeBlockFunction]]: """Create a stack of motion correction nodeblocks.""" - func_motion_blocks: list[NodeBlockFunction | list[NodeBlockFunction]] = ( - [motion_estimate_filter] - if rpool.check_rpool("desc-movementParameters_motion") - else [ - *get_motion_refs, - func_motion_estimates, - motion_estimate_filter, - ] - ) + func_blocks["motion"] = [] + if not rpool.check_rpool("motion-basefile"): + func_blocks["motion"].extend(get_motion_refs) + assert calc_motion_stats.inputs + if not all(rpool.check_rpool(resource) for resource in calc_motion_stats.inputs): + func_blocks["motion"].append(func_motion_estimates) + func_blocks["motion"].append(motion_estimate_filter) return [ *func_blocks["init"], *func_blocks["preproc"], - *func_motion_blocks, + *func_blocks["motion"], func_motion_correct, *func_blocks["mask"], calc_motion_stats, From 6781aaef211b2d69009ec81582886ff9783a1cc0 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 9 Jun 2025 11:26:09 -0400 Subject: [PATCH 324/507] fixup! fixup! :necktie: Resequence motion --- CPAC/func_preproc/func_motion.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index cdb789441e..63e5da69a7 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1011,12 +1011,11 @@ def stack_motion_blocks( assert calc_motion_stats.inputs if not all(rpool.check_rpool(resource) for resource in calc_motion_stats.inputs): func_blocks["motion"].append(func_motion_estimates) - func_blocks["motion"].append(motion_estimate_filter) + func_blocks["motion"].extend([func_motion_correct, motion_estimate_filter]) return [ *func_blocks["init"], *func_blocks["preproc"], *func_blocks["motion"], - func_motion_correct, *func_blocks["mask"], calc_motion_stats, *func_blocks["prep"], From bba145ee4c51bd3afb0fff5488061a8883a026fe Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 9 Jun 2025 12:42:26 -0400 Subject: [PATCH 325/507] =?UTF-8?q?:arrow=5Fup:=20Upgrade=20`requests`=202?= =?UTF-8?q?.32.0=20=E2=86=92=202.32.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 4 ++++ dev/docker_data/unpinned_requirements.txt | 1 - requirements.txt | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 078114399e..27779ab83e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Introduced specific switch `restore_t1w_intensity` for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config. - Updated GitHub Actions to run automated integration and regression tests on HPC. +### Upgraded + +- `requests` 2.32.0 → 2.32.3 + ### Fixed - A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs). 
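Taken together, the motion resequencing patches above settle stack_motion_blocks into one fixed ordering. A compact sketch of the resulting flow, with hypothetical string stand-ins for the real nodeblock objects:

    def stack_motion_blocks(func_blocks, have_basefile, have_stats_inputs):
        # Only assemble the motion nodeblocks the resource pool still needs.
        motion = []
        if not have_basefile:
            motion.append("get_motion_ref")  # pick or compute the reference volume
        if not have_stats_inputs:
            motion.append("func_motion_estimates")  # estimate parameters when stats inputs are missing
        motion += ["func_motion_correct", "motion_estimate_filter"]
        # Final order: init, preproc, motion, mask, stats, prep
        return [
            *func_blocks["init"],
            *func_blocks["preproc"],
            *motion,
            *func_blocks["mask"],
            "calc_motion_stats",
            *func_blocks["prep"],
        ]

This mirrors the final fixup: correction and filtering always follow the preproc blocks, masking follows motion correction, and motion statistics are calculated once the brain mask exists.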
diff --git a/dev/docker_data/unpinned_requirements.txt b/dev/docker_data/unpinned_requirements.txt index 186fee9168..2680ed2186 100644 --- a/dev/docker_data/unpinned_requirements.txt +++ b/dev/docker_data/unpinned_requirements.txt @@ -23,7 +23,6 @@ pybids PyPEER @ https://github.com/shnizzedy/PyPEER/archive/6965d2b2bea0fef824e885fec33a8e0e6bd50a97.zip python-dateutil PyYAML -requests scikit-learn scipy sdcflows diff --git a/requirements.txt b/requirements.txt index 1242536fc0..60e27eea5b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,7 +30,7 @@ pybids==0.15.6 PyPEER @ git+https://git@github.com/ChildMindInstitute/PyPEER.git@6965d2b2bea0fef824e885fec33a8e0e6bd50a97 python-dateutil==2.8.2 PyYAML==6.0 -requests==2.32.0 +requests==2.32.3 scikit-learn==1.5.0 scipy==1.11.1 sdcflows==2.4.0 From 7897957eac315b3f41f7e11adbc9f92fb7f45da6 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 9 Jun 2025 16:49:11 -0400 Subject: [PATCH 326/507] adding native transforms (.mat files) to outputs --- CPAC/pipeline/schema.py | 1 + CPAC/registration/registration.py | 273 ++++++++++-------- .../configs/pipeline_config_blank.yml | 3 + .../configs/pipeline_config_default.yml | 2 + CPAC/resources/cpac_outputs.tsv | 45 +++ 5 files changed, 206 insertions(+), 118 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 828c0b1aec..966d4d2187 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -692,6 +692,7 @@ def sanitize(filename): }, }, "registration_workflows": { + "sink_native_transforms": bool1_1, "anatomical_registration": { "run": bool1_1, "resolution_for_anat": All(str, Match(RESOLUTION_REGEX)), diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 5b5d7493a9..805af8f3fd 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1552,6 +1552,20 @@ def FSL_registration_connector( ), } + if cfg.registration_workflows["sink_native_transforms"]: + outputs.update( + { + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + flirt_reg_anat_mni, + "outputspec.invlinear_xfm", + ), + } + ) + if opt == "FSL": fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( f"anat_mni_fnirt_register{symm}" @@ -2048,6 +2062,24 @@ def ANTs_registration_connector( ), } + if cfg.registration_workflows["sink_native_transforms"]: + outputs.update( + { + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-initial_xfm": ( + ants_reg_anat_mni, + "outputspec.ants_initial_xfm", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-rigid_xfm": ( + ants_reg_anat_mni, + "outputspec.ants_rigid_xfm", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-affine_xfm": ( + ants_reg_anat_mni, + "outputspec.ants_affine_xfm", + ), + } + ) + return (wf, outputs) @@ -2482,75 +2514,76 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "label-lesion_mask", ], outputs={ - "space-template_desc-preproc_T1w": { - "Description": "The preprocessed T1w brain transformed to " - "template space.", - "Template": "T1w-template", - }, - "from-T1w_to-template_mode-image_desc-linear_xfm": { - "Description": "Linear (affine) transform from T1w native space " - "to T1w-template space.", - "Template": "T1w-template", - }, - "from-template_to-T1w_mode-image_desc-linear_xfm": { - "Description": "Linear (affine) transform from T1w-template space 
" - "to T1w native space.", - "Template": "T1w-template", - }, - "from-T1w_to-template_mode-image_desc-nonlinear_xfm": { - "Description": "Nonlinear (warp field) transform from T1w native " - "space to T1w-template space.", - "Template": "T1w-template", - }, - "from-template_to-T1w_mode-image_desc-nonlinear_xfm": { - "Description": "Nonlinear (warp field) transform from " - "T1w-template space to T1w native space.", - "Template": "T1w-template", - }, - "from-T1w_to-template_mode-image_xfm": { - "Description": "Composite (affine + warp field) transform from " - "T1w native space to T1w-template space.", - "Template": "T1w-template", - }, - "from-template_to-T1w_mode-image_xfm": { - "Description": "Composite (affine + warp field) transform from " - "T1w-template space to T1w native space.", - "Template": "T1w-template", - }, - "from-longitudinal_to-template_mode-image_desc-linear_xfm": { - "Description": "Linear (affine) transform from " - "longitudinal-template space to T1w-template " - "space.", - "Template": "T1w-template", - }, - "from-template_to-longitudinal_mode-image_desc-linear_xfm": { - "Description": "Linear (affine) transform from T1w-template " - "space to longitudinal-template space.", - "Template": "T1w-template", - }, - "from-longitudinal_to-template_mode-image_desc-nonlinear_xfm": { - "Description": "Nonlinear (warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", - "Template": "T1w-template", - }, - "from-template_to-longitudinal_mode-image_desc-nonlinear_xfm": { - "Description": "Nonlinear (warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", - "Template": "T1w-template", - }, - "from-longitudinal_to-template_mode-image_xfm": { - "Description": "Composite (affine + warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", - "Template": "T1w-template", + **{ + k: {"Description": v, "Template": "T1w-template"} + for k, v in [ + ( + "space-template_desc-preproc_T1w", + "The preprocessed T1w brain transformed to template space.", + ), + ( + "from-T1w_to-template_mode-image_desc-linear_xfm", + "Linear (affine) transform from T1w native space to T1w-template space.", + ), + ( + "from-template_to-T1w_mode-image_desc-linear_xfm", + "Linear (affine) transform from T1w-template space to T1w native space.", + ), + ( + "from-T1w_to-template_mode-image_desc-nonlinear_xfm", + "Nonlinear (warp field) transform from T1w native space to T1w-template space.", + ), + ( + "from-template_to-T1w_mode-image_desc-nonlinear_xfm", + "Nonlinear (warp field) transform from T1w-template space to T1w native space.", + ), + ( + "from-T1w_to-template_mode-image_xfm", + "Composite (affine + warp field) transform from T1w native space to T1w-template space.", + ), + ( + "from-template_to-T1w_mode-image_xfm", + "Composite (affine + warp field) transform from T1w-template space to T1w native space.", + ), + ( + "from-longitudinal_to-template_mode-image_desc-linear_xfm", + "Linear (affine) transform from longitudinal-template space to T1w-template space.", + ), + ( + "from-template_to-longitudinal_mode-image_desc-linear_xfm", + "Linear (affine) transform from T1w-template space to longitudinal-template space.", + ), + ( + "from-longitudinal_to-template_mode-image_desc-nonlinear_xfm", + "Nonlinear (warp field) transform from longitudinal-template space to T1w-template space.", + ), + ( + "from-template_to-longitudinal_mode-image_desc-nonlinear_xfm", + "Nonlinear (warp field) transform from T1w-template 
space to longitudinal-template space.", + ), + ( + "from-longitudinal_to-template_mode-image_xfm", + "Composite (affine + warp field) transform from longitudinal-template space to T1w-template space.", + ), + ( + "from-template_to-longitudinal_mode-image_xfm", + "Composite (affine + warp field) transform from T1w-template space to longitudinal-template space.", + ), + ] }, - "from-template_to-longitudinal_mode-image_xfm": { - "Description": "Composite (affine + warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", - "Template": "T1w-template", + **{ + f"from-{src}_to-{dst}_mode-image_desc-{xfm}_xfm": { + "Description": f"{desc} transform from {src.replace('longitudinal', 'longitudinal-template') if src == 'longitudinal' else src} native space to {dst.replace('longitudinal', 'longitudinal-template') if dst == 'longitudinal' else dst}-template space.", + "Template": "T1w-template", + } + for src in ["T1w", "longitudinal"] + for dst in ["template", "longitudinal"] + for xfm, desc in [ + ("initial", "Initial"), + ("rigid", "Rigid"), + ("affine", "Affine"), + ] + if src != dst }, }, ) @@ -2648,44 +2681,37 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "label-lesion_mask", ], outputs={ - "space-symtemplate_desc-preproc_T1w": { - "Template": "T1w-brain-template-symmetric" - }, - "from-T1w_to-symtemplate_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-T1w_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-T1w_to-symtemplate_mode-image_desc-nonlinear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-T1w_mode-image_desc-nonlinear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-T1w_to-symtemplate_mode-image_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-T1w_mode-image_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-longitudinal_to-symtemplate_mode-image_desc-nonlinear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-symtemplate_to-longitudinal_mode-image_desc-nonlinear_xfm": { - "Template": "T1w-template-symmetric" - }, - "from-longitudinal_to-symtemplate_mode-image_xfm": { - "Template": "T1w-template-symmetric" + **{ + k: {"Template": "T1w-template-symmetric"} + for k in [ + "space-symtemplate_desc-preproc_T1w", + "from-T1w_to-symtemplate_mode-image_desc-linear_xfm", + "from-symtemplate_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-symtemplate_mode-image_desc-nonlinear_xfm", + "from-symtemplate_to-T1w_mode-image_desc-nonlinear_xfm", + "from-T1w_to-symtemplate_mode-image_xfm", + "from-symtemplate_to-T1w_mode-image_xfm", + "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm", + "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm", + "from-longitudinal_to-symtemplate_mode-image_desc-nonlinear_xfm", + "from-symtemplate_to-longitudinal_mode-image_desc-nonlinear_xfm", + "from-longitudinal_to-symtemplate_mode-image_xfm", + "from-symtemplate_to-longitudinal_mode-image_xfm", + ] }, - "from-symtemplate_to-longitudinal_mode-image_xfm": { - "Template": "T1w-template-symmetric" + **{ + f"from-{src}_to-{dst}_mode-image_desc-{xfm}_xfm": { + "Description": f"{desc} transform from {src.replace('longitudinal', 'longitudinal-template') if src 
== 'longitudinal' else src} native space to {dst.replace('longitudinal', 'longitudinal-template') if dst == 'longitudinal' else dst}-template-symmetric space.", + "Template": "T1w-template-symmetric", + } + for src in ["T1w", "longitudinal", "symtemplate"] + for dst in ["symtemplate", "T1w", "longitudinal"] + for xfm, desc in [ + ("initial", "Initial"), + ("rigid", "Rigid"), + ("affine", "Affine"), + ] + if src != dst }, }, ) @@ -2762,21 +2788,32 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= "EPI-template-mask", ], outputs={ - "space-template_desc-preproc_bold": {"Template": "EPI-template"}, - "from-bold_to-EPItemplate_mode-image_desc-linear_xfm": { - "Template": "EPI-template" - }, - "from-EPItemplate_to-bold_mode-image_desc-linear_xfm": { - "Template": "EPI-template" - }, - "from-bold_to-EPItemplate_mode-image_desc-nonlinear_xfm": { - "Template": "EPI-template" + **{ + k: {"Template": "EPI-template"} + for k in [ + "space-template_desc-preproc_bold", + "from-bold_to-EPItemplate_mode-image_desc-linear_xfm", + "from-EPItemplate_to-bold_mode-image_desc-linear_xfm", + "from-bold_to-EPItemplate_mode-image_desc-nonlinear_xfm", + "from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm", + "from-bold_to-EPItemplate_mode-image_xfm", + "from-EPItemplate_to-bold_mode-image_xfm", + ] }, - "from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm": { - "Template": "EPI-template" + **{ + f"from-{src}_to-{dst}_mode-image_desc-{xfm}_xfm": { + "Description": f"{desc} transform from {src} native space to {dst} template space.", + "Template": "EPI-template", + } + for src in ["bold", "EPItemplate"] + for dst in ["EPItemplate", "bold"] + for xfm, desc in [ + ("initial", "Initial"), + ("rigid", "Rigid"), + ("affine", "Affine"), + ] + if src != dst }, - "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, - "from-EPItemplate_to-bold_mode-image_xfm": {"Template": "EPI-template"}, }, ) def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 66f34fe41c..b570541c6e 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -570,6 +570,9 @@ segmentation: WM_label: [2, 41] registration_workflows: + # sink native transform files to the output directory + sink_native_transforms: Off + anatomical_registration: run: Off registration: diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 5c22d2ee86..17796e939d 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -618,6 +618,8 @@ segmentation: registration_workflows: + # sink native transform files to the output directory + sink_native_transforms: Off anatomical_registration: diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index 8fe4cd284f..b0f036f71d 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -209,6 +209,51 @@ from-template_to-longitudinal_mode-image_xfm xfm anat NIfTI from-template_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI from-template_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI from-template_to-T1w_mode-image_xfm xfm anat NIfTI +from-T1w_to-template_mode-image_desc-linear_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-invlinear_xfm xfm anat MAT 
+from-T1w_to-symtemplate_mode-image_desc-linear_xfm xfm anat MAT +from-T1w_to-symtemplate_mode-image_desc-invlinear_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-initial_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-rigid_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-affine_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-initial_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-rigid_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-affine_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-initial_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-rigid_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-affine_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-initial_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-affine_xfm xfm anat MAT +from-T1w_to-symtemplate_mode-image_desc-initial_xfm xfm anat MAT +from-T1w_to-symtemplate_mode-image_desc-rigid_xfm xfm anat MAT +from-T1w_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT +from-T1w_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT +from-longitudinal_to-symtemplate_mode-image_desc-initial_xfm xfm anat MAT +from-longitudinal_to-symtemplate_mode-image_desc-rigid_xfm xfm anat MAT +from-longitudinal_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-initial_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT +from-longitudinal_to-T1w_mode-image_desc-affine_xfm xfm anat MAT +from-symtemplate_to-T1w_mode-image_desc-initial_xfm xfm anat MAT +from-symtemplate_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT +from-symtemplate_to-T1w_mode-image_desc-affine_xfm xfm anat MAT +from-symtemplate_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT +from-symtemplate_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT +from-symtemplate_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT +from-bold_to-EPItemplate_mode-image_desc-initial_xfm xfm func MAT +from-bold_to-EPItemplate_mode-image_desc-rigid_xfm xfm func MAT +from-bold_to-EPItemplate_mode-image_desc-affine_xfm xfm func MAT +from-EPItemplate_to-bold_mode-image_desc-initial_xfm xfm func MAT +from-EPItemplate_to-bold_mode-image_desc-rigid_xfm xfm func MAT +from-EPItemplate_to-bold_mode-image_desc-affine_xfm xfm func MAT +from-bold_to-EPItemplate_mode-image_desc-linear_xfm xfm func MAT +from-bold_to-EPItemplate_mode-image_desc-invlinear_xfm xfm func MAT space-template_label-CSF_mask mask template anat NIfTI space-template_label-WM_mask mask template anat NIfTI space-template_label-GM_mask mask template anat NIfTI From fe769d729328d9196652902e611b898520314ac4 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 11:23:45 -0400 Subject: [PATCH 327/507] adding to the changelog --- CHANGELOG.md | 1 + CPAC/registration/registration.py | 4 ++-- CPAC/resources/cpac_outputs.tsv | 12 ------------ 3 files changed, 3 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 078114399e..b5b6b32bcc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ and this 
project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - New switch `mask_sbref` under `func_input_prep` in functional registration and set to default `on`. - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. - `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node. +- Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. ### Changed diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 805af8f3fd..de47ae0c9c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2704,8 +2704,8 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "Description": f"{desc} transform from {src.replace('longitudinal', 'longitudinal-template') if src == 'longitudinal' else src} native space to {dst.replace('longitudinal', 'longitudinal-template') if dst == 'longitudinal' else dst}-template-symmetric space.", "Template": "T1w-template-symmetric", } - for src in ["T1w", "longitudinal", "symtemplate"] - for dst in ["symtemplate", "T1w", "longitudinal"] + for src in ["T1w", "longitudinal"] + for dst in ["symtemplate"] for xfm, desc in [ ("initial", "Initial"), ("rigid", "Rigid"), diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index b0f036f71d..125548d6b2 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -225,9 +225,6 @@ from-T1w_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT from-longitudinal_to-template_mode-image_desc-initial_xfm xfm anat MAT from-longitudinal_to-template_mode-image_desc-rigid_xfm xfm anat MAT from-longitudinal_to-template_mode-image_desc-affine_xfm xfm anat MAT -from-longitudinal_to-T1w_mode-image_desc-initial_xfm xfm anat MAT -from-longitudinal_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT -from-longitudinal_to-T1w_mode-image_desc-affine_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-initial_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-rigid_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT @@ -240,18 +237,9 @@ from-longitudinal_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT from-longitudinal_to-T1w_mode-image_desc-initial_xfm xfm anat MAT from-longitudinal_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT from-longitudinal_to-T1w_mode-image_desc-affine_xfm xfm anat MAT -from-symtemplate_to-T1w_mode-image_desc-initial_xfm xfm anat MAT -from-symtemplate_to-T1w_mode-image_desc-rigid_xfm xfm anat MAT -from-symtemplate_to-T1w_mode-image_desc-affine_xfm xfm anat MAT -from-symtemplate_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT -from-symtemplate_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT -from-symtemplate_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT from-bold_to-EPItemplate_mode-image_desc-initial_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-rigid_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-affine_xfm xfm func MAT -from-EPItemplate_to-bold_mode-image_desc-initial_xfm xfm func MAT -from-EPItemplate_to-bold_mode-image_desc-rigid_xfm xfm func MAT -from-EPItemplate_to-bold_mode-image_desc-affine_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-linear_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-invlinear_xfm xfm func MAT space-template_label-CSF_mask mask template anat NIfTI From 06a0c45e7771f85f325e8ac82614c9735449d26c Mon Sep 17 
00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 16:05:33 +0000 Subject: [PATCH 328/507] adding tests for the registration connectors for added outputs --- .../tests/test_ants_registration_connector.py | 55 +++++++++++++++++++ .../tests/test_fsl_registration_connector.py | 44 +++++++++++++++ 2 files changed, 99 insertions(+) create mode 100644 CPAC/registration/tests/test_ants_registration_connector.py create mode 100644 CPAC/registration/tests/test_fsl_registration_connector.py diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py new file mode 100644 index 0000000000..dd8071624b --- /dev/null +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -0,0 +1,55 @@ +import pytest +from types import SimpleNamespace +import CPAC.registration.ants_registration_connector as ants_registration_connector + +@pytest.fixture +def dummy_module(monkeypatch): + class DummyNode: + def __init__(self): + self.inputs = SimpleNamespace(inputspec=SimpleNamespace()) + self.outputspec = SimpleNamespace( + ants_initial_xfm="initial.mat", + ants_rigid_xfm="rigid.mat", + ants_affine_xfm="affine.mat", + ) + def dummy_create_wf(name): + return DummyNode() + monkeypatch.setattr(ants_registration_connector, 'create_wf_calculate_ants_warp', dummy_create_wf) + monkeypatch.setattr(ants_registration_connector, 'check_transforms', lambda x: (x, len(x))) + monkeypatch.setattr(ants_registration_connector, 'generate_inverse_transform_flags', lambda x: [True]*len(x)) + return ants_registration_connector + +def build_cfg(sink_native_transforms=True): + cfg = SimpleNamespace() + cfg.FROM = 'default' + cfg.registration_workflows = { + 'sink_native_transforms': sink_native_transforms + } + return cfg + +def test_sink_native_transforms_outputs(dummy_module): + connector = dummy_module + cfg = build_cfg(sink_native_transforms=True) + _, outputs = connector.ANTs_registration_connector( + wf_name='test', cfg=cfg + ) + expected_keys = [ + 'from-T1w_to-template_mode-image_desc-initial_xfm', + 'from-T1w_to-template_mode-image_desc-rigid_xfm', + 'from-T1w_to-template_mode-image_desc-affine_xfm', + ] + for key in expected_keys: + assert key in outputs + +def test_no_sink_native_transforms(dummy_module): + connector = dummy_module + cfg = build_cfg(sink_native_transforms=False) + _, outputs = connector.ANTs_registration_connector( + wf_name='test', cfg=cfg + ) + for key in [ + 'from-T1w_to-template_mode-image_desc-initial_xfm', + 'from-T1w_to-template_mode-image_desc-rigid_xfm', + 'from-T1w_to-template_mode-image_desc-affine_xfm', + ]: + assert key not in outputs \ No newline at end of file diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py new file mode 100644 index 0000000000..1bb8629c83 --- /dev/null +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -0,0 +1,44 @@ +import pytest +from types import SimpleNamespace +from CPAC.registration.registration import FSL_registration_connector + +@pytest.fixture +def dummy_module(monkeypatch): + class DummyNode: + def __init__(self): + self.inputs = SimpleNamespace(inputspec=SimpleNamespace()) + self.outputspec = SimpleNamespace( + linear_xfm="linear.mat", + invlinear_xfm="invlinear.mat", + ) + def dummy_create_linear(name): + return DummyNode() + def dummy_create_nonlinear(name): + return DummyNode() + monkeypatch.setattr( + 
"CPAC.registration.registration.create_fsl_flirt_linear_reg", dummy_create_linear + ) + monkeypatch.setattr( + "CPAC.registration.registration.create_fsl_fnirt_nonlinear_reg_nhp", dummy_create_nonlinear + ) + return FSL_registration_connector + +def build_cfg(sink_native_transforms=True): + cfg = SimpleNamespace() + cfg.registration_workflows = { + 'sink_native_transforms': sink_native_transforms + } + return cfg + +def test_sink_native_transforms_outputs(dummy_module): + connector = dummy_module + cfg = build_cfg(sink_native_transforms=True) + _, outputs = connector( + wf_name='test', cfg=cfg, orig="T1w", opt="FSL" + ) + expected_keys = [ + 'from-T1w_to-template_mode-image_desc-linear_xfm', + 'from-template_to-T1w_mode-image_desc-linear_xfm', + ] + for key in expected_keys: + assert key in outputs \ No newline at end of file From a3ecfc5463c371b0fd523cd7cfa5e1fc10352c2d Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 12:43:05 -0400 Subject: [PATCH 329/507] fixing imports in the test --- .../tests/test_ants_registration_connector.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index dd8071624b..843a2e9cf1 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,6 +1,6 @@ import pytest from types import SimpleNamespace -import CPAC.registration.ants_registration_connector as ants_registration_connector +import CPAC.registration.ANTs_registration_connector as ants_registration_connector @pytest.fixture def dummy_module(monkeypatch): @@ -12,7 +12,7 @@ def __init__(self): ants_rigid_xfm="rigid.mat", ants_affine_xfm="affine.mat", ) - def dummy_create_wf(name): + def dummy_create_wf(name, num_threads, reg_ants_skull): return DummyNode() monkeypatch.setattr(ants_registration_connector, 'create_wf_calculate_ants_warp', dummy_create_wf) monkeypatch.setattr(ants_registration_connector, 'check_transforms', lambda x: (x, len(x))) @@ -30,8 +30,9 @@ def build_cfg(sink_native_transforms=True): def test_sink_native_transforms_outputs(dummy_module): connector = dummy_module cfg = build_cfg(sink_native_transforms=True) + params = {'metric': 'MI'} _, outputs = connector.ANTs_registration_connector( - wf_name='test', cfg=cfg + wf_name='test', cfg=cfg, params=params ) expected_keys = [ 'from-T1w_to-template_mode-image_desc-initial_xfm', @@ -45,7 +46,7 @@ def test_no_sink_native_transforms(dummy_module): connector = dummy_module cfg = build_cfg(sink_native_transforms=False) _, outputs = connector.ANTs_registration_connector( - wf_name='test', cfg=cfg + wf_name='test', cfg=cfg, params=params ) for key in [ 'from-T1w_to-template_mode-image_desc-initial_xfm', From 29b47ddf5017f7e74f7abf4d3235f9b11fcf48f3 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 13:59:25 -0400 Subject: [PATCH 330/507] fixed imports --- CPAC/registration/tests/test_ants_registration_connector.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 843a2e9cf1..495218fa1e 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,6 +1,6 @@ import pytest from types import SimpleNamespace 
-import CPAC.registration.ANTs_registration_connector as ants_registration_connector +from CPAC.registration.registration import ANTs_registration_connector as ants_registration_connector @pytest.fixture def dummy_module(monkeypatch): @@ -45,6 +45,7 @@ def test_sink_native_transforms_outputs(dummy_module): def test_no_sink_native_transforms(dummy_module): connector = dummy_module cfg = build_cfg(sink_native_transforms=False) + params = {'metric': 'MI'} _, outputs = connector.ANTs_registration_connector( wf_name='test', cfg=cfg, params=params ) From 07ca12e081a3db46f574ac5e2fd4bf492ca8d615 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 15:30:54 -0400 Subject: [PATCH 331/507] rewriting the tests for registration connector --- .../tests/test_ants_registration_connector.py | 64 +++++-------------- .../tests/test_fsl_registration_connector.py | 63 +++++++----------- 2 files changed, 39 insertions(+), 88 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 495218fa1e..5d413fddc0 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,57 +1,25 @@ import pytest -from types import SimpleNamespace -from CPAC.registration.registration import ANTs_registration_connector as ants_registration_connector +from CPAC.registration.registration import ANTs_registration_connector -@pytest.fixture -def dummy_module(monkeypatch): - class DummyNode: - def __init__(self): - self.inputs = SimpleNamespace(inputspec=SimpleNamespace()) - self.outputspec = SimpleNamespace( - ants_initial_xfm="initial.mat", - ants_rigid_xfm="rigid.mat", - ants_affine_xfm="affine.mat", - ) - def dummy_create_wf(name, num_threads, reg_ants_skull): - return DummyNode() - monkeypatch.setattr(ants_registration_connector, 'create_wf_calculate_ants_warp', dummy_create_wf) - monkeypatch.setattr(ants_registration_connector, 'check_transforms', lambda x: (x, len(x))) - monkeypatch.setattr(ants_registration_connector, 'generate_inverse_transform_flags', lambda x: [True]*len(x)) - return ants_registration_connector - -def build_cfg(sink_native_transforms=True): - cfg = SimpleNamespace() - cfg.FROM = 'default' - cfg.registration_workflows = { - 'sink_native_transforms': sink_native_transforms +@pytest.mark.parametrize("sink_native_transforms", ['On', 'Off']) +def test_ants_registration_connector(sink_native_transforms): + cfg = { + 'registration-workflows': {'sink_native_transforms': sink_native_transforms}, } - return cfg - -def test_sink_native_transforms_outputs(dummy_module): - connector = dummy_module - cfg = build_cfg(sink_native_transforms=True) - params = {'metric': 'MI'} - _, outputs = connector.ANTs_registration_connector( - wf_name='test', cfg=cfg, params=params - ) - expected_keys = [ - 'from-T1w_to-template_mode-image_desc-initial_xfm', - 'from-T1w_to-template_mode-image_desc-rigid_xfm', - 'from-T1w_to-template_mode-image_desc-affine_xfm', - ] - for key in expected_keys: - assert key in outputs - -def test_no_sink_native_transforms(dummy_module): - connector = dummy_module - cfg = build_cfg(sink_native_transforms=False) params = {'metric': 'MI'} - _, outputs = connector.ANTs_registration_connector( + _, outputs = ANTs_registration_connector( wf_name='test', cfg=cfg, params=params ) - for key in [ + expected_keys = { 'from-T1w_to-template_mode-image_desc-initial_xfm', 
'from-T1w_to-template_mode-image_desc-rigid_xfm', 'from-T1w_to-template_mode-image_desc-affine_xfm', - ]: - assert key not in outputs \ No newline at end of file + } + if sink_native_transforms: + assert expected_keys.issubset(outputs.keys()), ( + f"Expected outputs {expected_keys} not found in {outputs.keys()}" + ) + else: + assert not expected_keys.intersection(outputs.keys()), ( + f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + ) \ No newline at end of file diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index 1bb8629c83..f3921d5605 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,44 +1,27 @@ import pytest -from types import SimpleNamespace from CPAC.registration.registration import FSL_registration_connector -@pytest.fixture -def dummy_module(monkeypatch): - class DummyNode: - def __init__(self): - self.inputs = SimpleNamespace(inputspec=SimpleNamespace()) - self.outputspec = SimpleNamespace( - linear_xfm="linear.mat", - invlinear_xfm="invlinear.mat", - ) - def dummy_create_linear(name): - return DummyNode() - def dummy_create_nonlinear(name): - return DummyNode() - monkeypatch.setattr( - "CPAC.registration.registration.create_fsl_flirt_linear_reg", dummy_create_linear - ) - monkeypatch.setattr( - "CPAC.registration.registration.create_fsl_fnirt_nonlinear_reg_nhp", dummy_create_nonlinear - ) - return FSL_registration_connector - -def build_cfg(sink_native_transforms=True): - cfg = SimpleNamespace() - cfg.registration_workflows = { - 'sink_native_transforms': sink_native_transforms +@pytest.mark.parametrize("sink_native_transforms", ['On', 'Off']) +def test_fsl_registration_connector(sink_native_transforms): + wf_name = 'test_fsl_registration_connector' + cfg = { + 'registration-workflows': {'sink_native_transforms': sink_native_transforms}, } - return cfg - -def test_sink_native_transforms_outputs(dummy_module): - connector = dummy_module - cfg = build_cfg(sink_native_transforms=True) - _, outputs = connector( - wf_name='test', cfg=cfg, orig="T1w", opt="FSL" - ) - expected_keys = [ - 'from-T1w_to-template_mode-image_desc-linear_xfm', - 'from-template_to-T1w_mode-image_desc-linear_xfm', - ] - for key in expected_keys: - assert key in outputs \ No newline at end of file + _, outputs = FSL_registration_connector(wf_name, cfg) + if sink_native_transforms == 'On': + expected_outputs = { + 'from-T1w_to-template_mode-image_desc-linear_xfm', + 'from-template_to-T1w_mode-image_desc-linear_xfm' + } + assert expected_outputs.issubset(outputs.keys()), ( + f"Expected outputs {expected_outputs} not found in {outputs.keys()}" + ) + else: + # Adjust this set based on what outputs should be present when 'Off' + not_expected_outputs = { + 'from-T1w_to-template_mode-image_desc-linear_xfm', + 'from-template_to-T1w_mode-image_desc-linear_xfm' + } + assert not not_expected_outputs.intersection(outputs.keys()), ( + f"Outputs {not_expected_outputs} should not be present when sink_native_transforms is Off" + ) \ No newline at end of file From 0788588ce00bb11b039312432a928be404c5ad6c Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 15:31:11 -0400 Subject: [PATCH 332/507] rewriting the tests for registration connector --- .../tests/test_ants_registration_connector.py | 30 +++++++++---------- .../tests/test_fsl_registration_connector.py | 30 
++++++++++--------- 2 files changed, 31 insertions(+), 29 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 5d413fddc0..7a4c518e27 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,25 +1,25 @@ import pytest + from CPAC.registration.registration import ANTs_registration_connector -@pytest.mark.parametrize("sink_native_transforms", ['On', 'Off']) + +@pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) def test_ants_registration_connector(sink_native_transforms): cfg = { - 'registration-workflows': {'sink_native_transforms': sink_native_transforms}, + "registration-workflows": {"sink_native_transforms": sink_native_transforms}, } - params = {'metric': 'MI'} - _, outputs = ANTs_registration_connector( - wf_name='test', cfg=cfg, params=params - ) + params = {"metric": "MI"} + _, outputs = ANTs_registration_connector(wf_name="test", cfg=cfg, params=params) expected_keys = { - 'from-T1w_to-template_mode-image_desc-initial_xfm', - 'from-T1w_to-template_mode-image_desc-rigid_xfm', - 'from-T1w_to-template_mode-image_desc-affine_xfm', + "from-T1w_to-template_mode-image_desc-initial_xfm", + "from-T1w_to-template_mode-image_desc-rigid_xfm", + "from-T1w_to-template_mode-image_desc-affine_xfm", } if sink_native_transforms: - assert expected_keys.issubset(outputs.keys()), ( - f"Expected outputs {expected_keys} not found in {outputs.keys()}" - ) + assert expected_keys.issubset( + outputs.keys() + ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" else: - assert not expected_keys.intersection(outputs.keys()), ( - f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" - ) \ No newline at end of file + assert not expected_keys.intersection( + outputs.keys() + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index f3921d5605..a4faacaf1b 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,27 +1,29 @@ import pytest + from CPAC.registration.registration import FSL_registration_connector -@pytest.mark.parametrize("sink_native_transforms", ['On', 'Off']) + +@pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) def test_fsl_registration_connector(sink_native_transforms): - wf_name = 'test_fsl_registration_connector' + wf_name = "test_fsl_registration_connector" cfg = { - 'registration-workflows': {'sink_native_transforms': sink_native_transforms}, + "registration-workflows": {"sink_native_transforms": sink_native_transforms}, } _, outputs = FSL_registration_connector(wf_name, cfg) - if sink_native_transforms == 'On': + if sink_native_transforms == "On": expected_outputs = { - 'from-T1w_to-template_mode-image_desc-linear_xfm', - 'from-template_to-T1w_mode-image_desc-linear_xfm' + "from-T1w_to-template_mode-image_desc-linear_xfm", + "from-template_to-T1w_mode-image_desc-linear_xfm", } - assert expected_outputs.issubset(outputs.keys()), ( - f"Expected outputs {expected_outputs} not found in {outputs.keys()}" - ) + assert expected_outputs.issubset( + outputs.keys() + ), f"Expected outputs {expected_outputs} not found in {outputs.keys()}" else: # Adjust this set based on what outputs should be 
present when 'Off' not_expected_outputs = { - 'from-T1w_to-template_mode-image_desc-linear_xfm', - 'from-template_to-T1w_mode-image_desc-linear_xfm' + "from-T1w_to-template_mode-image_desc-linear_xfm", + "from-template_to-T1w_mode-image_desc-linear_xfm", } - assert not not_expected_outputs.intersection(outputs.keys()), ( - f"Outputs {not_expected_outputs} should not be present when sink_native_transforms is Off" - ) \ No newline at end of file + assert not not_expected_outputs.intersection( + outputs.keys() + ), f"Outputs {not_expected_outputs} should not be present when sink_native_transforms is Off" From 5a9b474dce108e52f31607b1014a71dd288c114a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 18:55:04 -0400 Subject: [PATCH 333/507] cfg changes for pytest --- .../tests/test_ants_registration_connector.py | 35 +++++++++++------- .../tests/test_fsl_registration_connector.py | 36 +++++++++---------- 2 files changed, 39 insertions(+), 32 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 7a4c518e27..6de8160cbc 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,13 +1,24 @@ import pytest - +from types import SimpleNamespace from CPAC.registration.registration import ANTs_registration_connector - @pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) def test_ants_registration_connector(sink_native_transforms): - cfg = { - "registration-workflows": {"sink_native_transforms": sink_native_transforms}, - } + + cfg = SimpleNamespace( + pipeline_setup={ + "system_config": {"num_ants_threads": 1} + }, + registration_workflows={ + "sink_native_transforms": sink_native_transforms, + "anatomical_registration": { + "reg_with_skull": True, + "registration": { + "ANTs": {"use_lesion_mask": False} + } + } + } + ) params = {"metric": "MI"} _, outputs = ANTs_registration_connector(wf_name="test", cfg=cfg, params=params) expected_keys = { @@ -15,11 +26,11 @@ def test_ants_registration_connector(sink_native_transforms): "from-T1w_to-template_mode-image_desc-rigid_xfm", "from-T1w_to-template_mode-image_desc-affine_xfm", } - if sink_native_transforms: - assert expected_keys.issubset( - outputs.keys() - ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" + if sink_native_transforms == "On": + assert expected_keys.issubset(outputs.keys()), ( + f"Expected outputs {expected_keys} not found in {outputs.keys()}" + ) else: - assert not expected_keys.intersection( - outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + assert not expected_keys.intersection(outputs.keys()), ( + f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + ) \ No newline at end of file diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index a4faacaf1b..18eaa9ab2c 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,29 +1,25 @@ import pytest - +from types import SimpleNamespace from CPAC.registration.registration import FSL_registration_connector - @pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) def test_fsl_registration_connector(sink_native_transforms): wf_name = "test_fsl_registration_connector" - cfg = { - 
"registration-workflows": {"sink_native_transforms": sink_native_transforms}, - } + cfg = SimpleNamespace( + registration_workflows=SimpleNamespace( + sink_native_transforms=sink_native_transforms + ) + ) _, outputs = FSL_registration_connector(wf_name, cfg) + expected_outputs = { + "from-T1w_to-template_mode-image_desc-linear_xfm", + "from-template_to-T1w_mode-image_desc-linear_xfm", + } if sink_native_transforms == "On": - expected_outputs = { - "from-T1w_to-template_mode-image_desc-linear_xfm", - "from-template_to-T1w_mode-image_desc-linear_xfm", - } - assert expected_outputs.issubset( - outputs.keys() - ), f"Expected outputs {expected_outputs} not found in {outputs.keys()}" + assert expected_outputs.issubset(outputs.keys()), ( + f"Expected outputs {expected_outputs} not found in {outputs.keys()}" + ) else: - # Adjust this set based on what outputs should be present when 'Off' - not_expected_outputs = { - "from-T1w_to-template_mode-image_desc-linear_xfm", - "from-template_to-T1w_mode-image_desc-linear_xfm", - } - assert not not_expected_outputs.intersection( - outputs.keys() - ), f"Outputs {not_expected_outputs} should not be present when sink_native_transforms is Off" + assert not expected_outputs.intersection(outputs.keys()), ( + f"Outputs {expected_outputs} should not be present when sink_native_transforms is Off" + ) \ No newline at end of file From 0fce6386cf9aac6c46308e0212313586b0de0a4f Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 10 Jun 2025 21:13:28 -0400 Subject: [PATCH 334/507] renamed linear_xfm to flirt_xfm and updated the tests --- .../pipeline/nipype_pipeline_engine/engine.py | 2 +- CPAC/registration/registration.py | 4 +- .../tests/test_ants_registration_connector.py | 47 ++++++++++++------- .../tests/test_fsl_registration_connector.py | 42 ++++++++++------- 4 files changed, 56 insertions(+), 39 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 743285ae9d..df442d60bc 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -58,7 +58,7 @@ from numpy import prod from traits.trait_base import Undefined -from traits.trait_handlers import TraitListObject +from traits.api import List as TraitListObject from nibabel import load from nipype.interfaces.utility import Function from nipype.pipeline import engine as pe diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index de47ae0c9c..2ada44f1d2 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1555,11 +1555,11 @@ def FSL_registration_connector( if cfg.registration_workflows["sink_native_transforms"]: outputs.update( { - f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-flirt_xfm": ( flirt_reg_anat_mni, "outputspec.linear_xfm", ), - f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-flirt_xfm": ( flirt_reg_anat_mni, "outputspec.invlinear_xfm", ), diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 6de8160cbc..72985e4e3f 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,36 +1,47 @@ import pytest -from types import SimpleNamespace from CPAC.registration.registration 
import ANTs_registration_connector -@pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) -def test_ants_registration_connector(sink_native_transforms): +class AttrDict(dict): + def __getattr__(self, item): + value = self[item] + if isinstance(value, dict): + return AttrDict(value) + return value - cfg = SimpleNamespace( - pipeline_setup={ - "system_config": {"num_ants_threads": 1} +@pytest.mark.parametrize("sink_native_transforms", [True, False]) +def test_ants_registration_connector(sink_native_transforms): + wf_name = "test_ants_registration_connector" + cfg = AttrDict({ + "registration-workflows": {"sink_native_transforms": sink_native_transforms}, + "pipeline_setup": { + "system_config": { + "num_ants_threads": 1 + } }, - registration_workflows={ - "sink_native_transforms": sink_native_transforms, + "registration_workflows": { + "sink_native_transforms": sink_native_transforms, "anatomical_registration": { "reg_with_skull": True, "registration": { - "ANTs": {"use_lesion_mask": False} + "ANTs": { + "use_lesion_mask": False + } } } } - ) + }) params = {"metric": "MI"} - _, outputs = ANTs_registration_connector(wf_name="test", cfg=cfg, params=params) + _, outputs = ANTs_registration_connector(wf_name, cfg=cfg, params=params) expected_keys = { "from-T1w_to-template_mode-image_desc-initial_xfm", "from-T1w_to-template_mode-image_desc-rigid_xfm", "from-T1w_to-template_mode-image_desc-affine_xfm", } - if sink_native_transforms == "On": - assert expected_keys.issubset(outputs.keys()), ( - f"Expected outputs {expected_keys} not found in {outputs.keys()}" - ) + if sink_native_transforms: + assert expected_keys.issubset( + outputs.keys() + ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" else: - assert not expected_keys.intersection(outputs.keys()), ( - f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" - ) \ No newline at end of file + assert not expected_keys.intersection( + outputs.keys() + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index 18eaa9ab2c..f7cf87b15f 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,25 +1,31 @@ import pytest -from types import SimpleNamespace from CPAC.registration.registration import FSL_registration_connector -@pytest.mark.parametrize("sink_native_transforms", ["On", "Off"]) +class AttrDict(dict): + def __getattr__(self, item): + value = self[item] + if isinstance(value, dict): + return AttrDict(value) + return value + +@pytest.mark.parametrize("sink_native_transforms", [True, False]) def test_fsl_registration_connector(sink_native_transforms): wf_name = "test_fsl_registration_connector" - cfg = SimpleNamespace( - registration_workflows=SimpleNamespace( - sink_native_transforms=sink_native_transforms - ) - ) - _, outputs = FSL_registration_connector(wf_name, cfg) - expected_outputs = { - "from-T1w_to-template_mode-image_desc-linear_xfm", - "from-template_to-T1w_mode-image_desc-linear_xfm", + cfg = AttrDict({ + "registration_workflows": { + "sink_native_transforms": sink_native_transforms + } + }) + _, outputs = FSL_registration_connector(wf_name, cfg, opt="FSL") + expected_keys = { + "from-T1w_to-template_mode-image_desc-flirt_xfm", + "from-template_to-T1w_mode-image_desc-flirt_xfm", } - if 
sink_native_transforms == "On": - assert expected_outputs.issubset(outputs.keys()), ( - f"Expected outputs {expected_outputs} not found in {outputs.keys()}" - ) + if sink_native_transforms == True: + assert expected_keys.issubset( + outputs.keys() + ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" else: - assert not expected_outputs.intersection(outputs.keys()), ( - f"Outputs {expected_outputs} should not be present when sink_native_transforms is Off" - ) \ No newline at end of file + assert not expected_keys.intersection( + outputs.keys() + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file From 0c2bb173c17a7dcc9bfe4a47d26b0c16846696bf Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 11 Jun 2025 01:14:09 +0000 Subject: [PATCH 335/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../tests/test_ants_registration_connector.py | 36 +++++++++---------- .../tests/test_fsl_registration_connector.py | 12 +++---- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 72985e4e3f..900ebde70d 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,6 +1,7 @@ import pytest from CPAC.registration.registration import ANTs_registration_connector + class AttrDict(dict): def __getattr__(self, item): value = self[item] @@ -8,28 +9,25 @@ def __getattr__(self, item): return AttrDict(value) return value + @pytest.mark.parametrize("sink_native_transforms", [True, False]) def test_ants_registration_connector(sink_native_transforms): wf_name = "test_ants_registration_connector" - cfg = AttrDict({ - "registration-workflows": {"sink_native_transforms": sink_native_transforms}, - "pipeline_setup": { - "system_config": { - "num_ants_threads": 1 - } - }, - "registration_workflows": { - "sink_native_transforms": sink_native_transforms, - "anatomical_registration": { - "reg_with_skull": True, - "registration": { - "ANTs": { - "use_lesion_mask": False - } - } - } + cfg = AttrDict( + { + "registration-workflows": { + "sink_native_transforms": sink_native_transforms + }, + "pipeline_setup": {"system_config": {"num_ants_threads": 1}}, + "registration_workflows": { + "sink_native_transforms": sink_native_transforms, + "anatomical_registration": { + "reg_with_skull": True, + "registration": {"ANTs": {"use_lesion_mask": False}}, + }, + }, } - }) + ) params = {"metric": "MI"} _, outputs = ANTs_registration_connector(wf_name, cfg=cfg, params=params) expected_keys = { @@ -44,4 +42,4 @@ def test_ants_registration_connector(sink_native_transforms): else: assert not expected_keys.intersection( outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index f7cf87b15f..a7dad7bcef 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,6 +1,7 @@ import pytest from CPAC.registration.registration import FSL_registration_connector 
+ class AttrDict(dict): def __getattr__(self, item): value = self[item] @@ -8,14 +9,13 @@ def __getattr__(self, item): return AttrDict(value) return value + @pytest.mark.parametrize("sink_native_transforms", [True, False]) def test_fsl_registration_connector(sink_native_transforms): wf_name = "test_fsl_registration_connector" - cfg = AttrDict({ - "registration_workflows": { - "sink_native_transforms": sink_native_transforms - } - }) + cfg = AttrDict( + {"registration_workflows": {"sink_native_transforms": sink_native_transforms}} + ) _, outputs = FSL_registration_connector(wf_name, cfg, opt="FSL") expected_keys = { "from-T1w_to-template_mode-image_desc-flirt_xfm", @@ -28,4 +28,4 @@ def test_fsl_registration_connector(sink_native_transforms): else: assert not expected_keys.intersection( outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" From 0e7eebfbaeb2d6ffcbbb88cd54d56b42f9b59407 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 11 Jun 2025 11:25:32 -0400 Subject: [PATCH 336/507] Update CPAC/registration/tests/test_ants_registration_connector.py Co-authored-by: Jon Cluce --- CPAC/registration/tests/test_ants_registration_connector.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 900ebde70d..0ce239ee53 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -13,11 +13,8 @@ def __getattr__(self, item): @pytest.mark.parametrize("sink_native_transforms", [True, False]) def test_ants_registration_connector(sink_native_transforms): wf_name = "test_ants_registration_connector" - cfg = AttrDict( + cfg = Configuration( { - "registration-workflows": { - "sink_native_transforms": sink_native_transforms - }, "pipeline_setup": {"system_config": {"num_ants_threads": 1}}, "registration_workflows": { "sink_native_transforms": sink_native_transforms, From c4418a6a5f35de531ba4de3ca175f77605744350 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 11 Jun 2025 11:25:49 -0400 Subject: [PATCH 337/507] Update CPAC/registration/tests/test_ants_registration_connector.py Co-authored-by: Jon Cluce --- .../tests/test_ants_registration_connector.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 0ce239ee53..18fb673cce 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,13 +1,6 @@ import pytest from CPAC.registration.registration import ANTs_registration_connector - - -class AttrDict(dict): - def __getattr__(self, item): - value = self[item] - if isinstance(value, dict): - return AttrDict(value) - return value +from CPAC.utils.configuration import Configuration @pytest.mark.parametrize("sink_native_transforms", [True, False]) From c0c6fed027b32fab40d0c780ce6b187ccee7af82 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 11 Jun 2025 11:29:38 -0400 Subject: [PATCH 338/507] Update 
CPAC/registration/tests/test_fsl_registration_connector.py Co-authored-by: Jon Cluce --- CPAC/registration/tests/test_fsl_registration_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index a7dad7bcef..22b3a9437f 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -13,7 +13,7 @@ def __getattr__(self, item): @pytest.mark.parametrize("sink_native_transforms", [True, False]) def test_fsl_registration_connector(sink_native_transforms): wf_name = "test_fsl_registration_connector" - cfg = AttrDict( + cfg = Configuration( {"registration_workflows": {"sink_native_transforms": sink_native_transforms}} ) _, outputs = FSL_registration_connector(wf_name, cfg, opt="FSL") From dc04235503a6e720ab0715c1ceaa2495503f8e8e Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 11 Jun 2025 11:29:44 -0400 Subject: [PATCH 339/507] Update CPAC/registration/tests/test_fsl_registration_connector.py Co-authored-by: Jon Cluce --- .../tests/test_fsl_registration_connector.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index 22b3a9437f..4129502f51 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,13 +1,6 @@ import pytest from CPAC.registration.registration import FSL_registration_connector - - -class AttrDict(dict): - def __getattr__(self, item): - value = self[item] - if isinstance(value, dict): - return AttrDict(value) - return value +from CPAC.utils.configuration import Configuration @pytest.mark.parametrize("sink_native_transforms", [True, False]) From bd511f696ece574db5c3933d1adcf2a545dfda29 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 11 Jun 2025 12:06:38 -0400 Subject: [PATCH 340/507] updating the output names and unit tests --- CPAC/registration/registration.py | 64 +++++++++++-------- .../tests/test_ants_registration_connector.py | 10 +-- .../tests/test_fsl_registration_connector.py | 10 +-- CPAC/resources/cpac_outputs.tsv | 28 ++++---- CPAC/utils/tests/test_utils.py | 11 ++++ 5 files changed, 66 insertions(+), 57 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 2ada44f1d2..fc146a8091 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2268,23 +2268,28 @@ def bold_to_T1template_xfm_connector( "FNIRT-T1w-brain-template", "template-ref-mask", ], - outputs={ - "space-template_desc-preproc_T1w": {"Template": "T1w-brain-template"}, - "space-template_desc-head_T1w": {"Template": "T1w-template"}, - "space-template_desc-brain_mask": {"Template": "T1w-template"}, - "space-template_desc-T1wT2w_biasfield": {"Template": "T1w-template"}, - "from-T1w_to-template_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, - "from-template_to-T1w_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, - "from-T1w_to-template_mode-image_xfm": {"Template": "T1w-template"}, - "from-T1w_to-template_mode-image_warp": {"Template": "T1w-template"}, - "from-longitudinal_to-template_mode-image_desc-linear_xfm": { - "Template": "T1w-template" - }, - 
"from-template_to-longitudinal_mode-image_desc-linear_xfm": { - "Template": "T1w-template" + outputs = { + **{ + key: {"Template": "T1w-template"} + for key in [ + "space-template_desc-head_T1w", + "space-template_desc-brain_mask", + "space-template_desc-T1wT2w_biasfield", + "from-T1w_to-template_mode-image_desc-linear_xfm", + "from-template_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-template_mode-image_xfm", + "from-T1w_to-template_mode-image_warp", + "from-longitudinal_to-template_mode-image_desc-linear_xfm", + "from-template_to-longitudinal_mode-image_desc-linear_xfm", + "from-longitudinal_to-template_mode-image_xfm", + "from-T1w_to-template_mode-image_desc-flirt_xfm", + "from-template_to-T1w_mode-image_desc-flirt_xfm", + "from-longitudinal_to-template_mode-image_desc-flirt_xfm", + "from-template_to-longitudinal_mode-image_desc-flirt_xfm", + ] }, - "from-longitudinal_to-template_mode-image_xfm": {"Template": "T1w-template"}, - }, + "space-template_desc-preproc_T1w": {"Template": "T1w-brain-template"}, + } ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Register T1w to template with FSL.""" @@ -2371,6 +2376,10 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm", "from-longitudinal_to-symtemplate_mode-image_xfm", "space-symtemplate_desc-T1wT2w_biasfield", + "from-T1w_to-symtemplate_mode-image_desc-flirt_xfm", + "from-symtemplate_to-T1w_mode-image_desc-flirt_xfm", + "from-longitudinal_to-symtemplate_mode-image_desc-flirt_xfm", + "from-symtemplate_to-longitudinal_mode-image_desc-flirt_xfm", ] }, }, @@ -2438,16 +2447,19 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N "EPI-template", "EPI-template-mask", ], - outputs={ - "space-template_desc-preproc_bold": {"Template": "EPI-template"}, - "from-bold_to-EPItemplate_mode-image_desc-linear_xfm": { - "Template": "EPI-template" - }, - "from-EPItemplate_to-bold_mode-image_desc-linear_xfm": { - "Template": "EPI-template" - }, - "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, - }, + outputs = { + **{ + key: {"Template": "EPI-template"} + for key in [ + "space-template_desc-preproc_bold", + "from-bold_to-EPItemplate_mode-image_desc-linear_xfm", + "from-EPItemplate_to-bold_mode-image_desc-linear_xfm", + "from-bold_to-EPItemplate_mode-image_xfm", + "from-bold_to-EPItemplate_mode-image_desc-flirt_xfm", + "from-EPItemplate_to-bold_mode-image_desc-flirt_xfm", + ] + } + } ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Directly register the mean functional to an EPI template. 
No T1w involved.""" diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_ants_registration_connector.py index 18fb673cce..5dd134fd12 100644 --- a/CPAC/registration/tests/test_ants_registration_connector.py +++ b/CPAC/registration/tests/test_ants_registration_connector.py @@ -1,6 +1,7 @@ import pytest from CPAC.registration.registration import ANTs_registration_connector from CPAC.utils.configuration import Configuration +from CPAC.utils.tests.test_utils import check_expected_keys @pytest.mark.parametrize("sink_native_transforms", [True, False]) @@ -25,11 +26,4 @@ def test_ants_registration_connector(sink_native_transforms): "from-T1w_to-template_mode-image_desc-rigid_xfm", "from-T1w_to-template_mode-image_desc-affine_xfm", } - if sink_native_transforms: - assert expected_keys.issubset( - outputs.keys() - ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" - else: - assert not expected_keys.intersection( - outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + check_expected_keys(sink_native_transforms, outputs, expected_keys) diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_fsl_registration_connector.py index 4129502f51..8dca7f70d4 100644 --- a/CPAC/registration/tests/test_fsl_registration_connector.py +++ b/CPAC/registration/tests/test_fsl_registration_connector.py @@ -1,6 +1,7 @@ import pytest from CPAC.registration.registration import FSL_registration_connector from CPAC.utils.configuration import Configuration +from CPAC.utils.tests.test_utils import check_expected_keys @pytest.mark.parametrize("sink_native_transforms", [True, False]) @@ -14,11 +15,4 @@ def test_fsl_registration_connector(sink_native_transforms): "from-T1w_to-template_mode-image_desc-flirt_xfm", "from-template_to-T1w_mode-image_desc-flirt_xfm", } - if sink_native_transforms == True: - assert expected_keys.issubset( - outputs.keys() - ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" - else: - assert not expected_keys.intersection( - outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + check_expected_keys(sink_native_transforms, outputs, expected_keys) diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index 125548d6b2..d5a61b18bb 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -209,28 +209,26 @@ from-template_to-longitudinal_mode-image_xfm xfm anat NIfTI from-template_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI from-template_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI from-template_to-T1w_mode-image_xfm xfm anat NIfTI -from-T1w_to-template_mode-image_desc-linear_xfm xfm anat MAT -from-T1w_to-template_mode-image_desc-invlinear_xfm xfm anat MAT -from-T1w_to-symtemplate_mode-image_desc-linear_xfm xfm anat MAT -from-T1w_to-symtemplate_mode-image_desc-invlinear_xfm xfm anat MAT -from-T1w_to-template_mode-image_desc-initial_xfm xfm anat MAT -from-T1w_to-template_mode-image_desc-rigid_xfm xfm anat MAT -from-T1w_to-template_mode-image_desc-affine_xfm xfm anat MAT +from-T1w_to-template_mode-image_desc-flirt_xfm xfm anat MAT +from-T1w_to-symtemplate_mode-image_desc-flirt_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-flirt_xfm xfm anat MAT +from-template_to-T1w_mode-image_desc-flirt_xfm xfm anat MAT +from-symtemplate_to-T1w_mode-image_desc-flirt_xfm xfm anat MAT 
+from-template_to-longitudinal_mode-image_desc-flirt_xfm xfm anat MAT +from-longitudinal_to-symtemplate_mode-image_desc-flirt_xfm xfm anat MAT +from-symtemplate_to-longitudinal_mode-image_desc-flirt_xfm xfm anat MAT from-T1w_to-template_mode-image_desc-initial_xfm xfm anat MAT from-T1w_to-template_mode-image_desc-rigid_xfm xfm anat MAT from-T1w_to-template_mode-image_desc-affine_xfm xfm anat MAT from-T1w_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT from-T1w_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT from-T1w_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT -from-longitudinal_to-template_mode-image_desc-initial_xfm xfm anat MAT -from-longitudinal_to-template_mode-image_desc-rigid_xfm xfm anat MAT -from-longitudinal_to-template_mode-image_desc-affine_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-initial_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-rigid_xfm xfm anat MAT from-T1w_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT -from-T1w_to-longitudinal_mode-image_desc-initial_xfm xfm anat MAT -from-T1w_to-longitudinal_mode-image_desc-rigid_xfm xfm anat MAT -from-T1w_to-longitudinal_mode-image_desc-affine_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-initial_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-rigid_xfm xfm anat MAT +from-longitudinal_to-template_mode-image_desc-affine_xfm xfm anat MAT from-longitudinal_to-symtemplate_mode-image_desc-initial_xfm xfm anat MAT from-longitudinal_to-symtemplate_mode-image_desc-rigid_xfm xfm anat MAT from-longitudinal_to-symtemplate_mode-image_desc-affine_xfm xfm anat MAT @@ -240,8 +238,8 @@ from-longitudinal_to-T1w_mode-image_desc-affine_xfm xfm anat MAT from-bold_to-EPItemplate_mode-image_desc-initial_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-rigid_xfm xfm func MAT from-bold_to-EPItemplate_mode-image_desc-affine_xfm xfm func MAT -from-bold_to-EPItemplate_mode-image_desc-linear_xfm xfm func MAT -from-bold_to-EPItemplate_mode-image_desc-invlinear_xfm xfm func MAT +from-bold_to-EPItemplate_mode-image_desc-flirt_xfm xfm func MAT +from-EPItemplate_to-bold_mode-image_desc-flirt_xfm xfm func MAT space-template_label-CSF_mask mask template anat NIfTI space-template_label-WM_mask mask template anat NIfTI space-template_label-GM_mask mask template anat NIfTI diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index ab896c6029..31b9726984 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -168,3 +168,14 @@ def test_system_deps(): Raises an exception if dependencies are not met. 
""" check_system_deps(*([True] * 4)) + + +def check_expected_keys(sink_native_transforms: bool, outputs: dict, expected_keys: set) -> None: + if sink_native_transforms: + assert expected_keys.issubset( + outputs.keys() + ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" + else: + assert not expected_keys.intersection( + outputs.keys() + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file From b42fcd6ea47efecfd48ee3677ea708b51b7914bb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 11 Jun 2025 16:06:54 +0000 Subject: [PATCH 341/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/registration/registration.py | 8 ++++---- CPAC/utils/tests/test_utils.py | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index fc146a8091..58a8aa9c25 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2268,7 +2268,7 @@ def bold_to_T1template_xfm_connector( "FNIRT-T1w-brain-template", "template-ref-mask", ], - outputs = { + outputs={ **{ key: {"Template": "T1w-template"} for key in [ @@ -2289,7 +2289,7 @@ def bold_to_T1template_xfm_connector( ] }, "space-template_desc-preproc_T1w": {"Template": "T1w-brain-template"}, - } + }, ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Register T1w to template with FSL.""" @@ -2447,7 +2447,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N "EPI-template", "EPI-template-mask", ], - outputs = { + outputs={ **{ key: {"Template": "EPI-template"} for key in [ @@ -2459,7 +2459,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N "from-EPItemplate_to-bold_mode-image_desc-flirt_xfm", ] } - } + }, ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Directly register the mean functional to an EPI template. No T1w involved.""" diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 31b9726984..ff72422f5a 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -170,7 +170,9 @@ def test_system_deps(): check_system_deps(*([True] * 4)) -def check_expected_keys(sink_native_transforms: bool, outputs: dict, expected_keys: set) -> None: +def check_expected_keys( + sink_native_transforms: bool, outputs: dict, expected_keys: set +) -> None: if sink_native_transforms: assert expected_keys.issubset( outputs.keys() @@ -178,4 +180,4 @@ def check_expected_keys(sink_native_transforms: bool, outputs: dict, expected_ke else: assert not expected_keys.intersection( outputs.keys() - ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" \ No newline at end of file + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" From f0fe92c82025fe03b94a01d067a25294d79d90a6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 11 Jun 2025 17:10:43 -0400 Subject: [PATCH 342/507] fixup! fixup! fixup! 
:necktie: Resequence motion --- CPAC/func_preproc/func_motion.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 63e5da69a7..f42cf2bf28 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -1011,11 +1011,12 @@ def stack_motion_blocks( assert calc_motion_stats.inputs if not all(rpool.check_rpool(resource) for resource in calc_motion_stats.inputs): func_blocks["motion"].append(func_motion_estimates) - func_blocks["motion"].extend([func_motion_correct, motion_estimate_filter]) + func_blocks["motion"].append(motion_estimate_filter) return [ *func_blocks["init"], - *func_blocks["preproc"], *func_blocks["motion"], + *func_blocks["preproc"], + func_motion_correct, *func_blocks["mask"], calc_motion_stats, *func_blocks["prep"], From a336ac00a666fd4b46fcae3fb5381bdc7f0d6134 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 18:24:45 -0400 Subject: [PATCH 343/507] replacing 3drefit with 3dwarp in func deoblique --- CPAC/func_preproc/func_preproc.py | 7 +++-- CPAC/utils/utils.py | 40 ++++++++++++++++++++++++++ dev/docker_data/required_afni_pkgs.txt | 1 + 3 files changed, 46 insertions(+), 2 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index ff626765c4..3e38476fba 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -29,7 +29,7 @@ PrintHeader, SetDirectionByMatrix, ) -from CPAC.utils.utils import add_afni_prefix +from CPAC.utils.utils import add_afni_prefix, afni_3dwarp def collect_arguments(*args): @@ -708,7 +708,10 @@ def fsl_afni_subworkflow(cfg, pipe_num, opt=None): def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): """Reorient functional timeseries.""" func_deoblique = pe.Node( - interface=afni_utils.Refit(), + Function( + input_names=["in_file", "deoblique"], + output_names=["out_file"], + function=afni_3dwarp), name=f"func_deoblique_{pipe_num}", mem_gb=0.68, mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"), diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 6ca7e25689..8b5f8abbc6 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -962,6 +962,46 @@ def add_afni_prefix(tpattern): return tpattern +def afni_3dwarp(in_file, out_file=None, deoblique=False): + """ + Runs AFNI's 3dWarp command with optional deobliquing. + + Parameters + ---------- + in_file : str + Path to the input NIfTI file. + out_file : str or None + Path for the output file. If None, a name will be generated in the current directory. + deoblique : bool + If True, adds the '-deoblique' flag to the 3dWarp command. + + Returns + ------- + out_file : str + Path to the output file. 
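    Examples
    --------
    Illustrative call only (assumes AFNI's ``3dWarp`` is on the ``PATH``;
    the filename is hypothetical). With ``out_file=None`` the output is
    written to the current working directory::

        afni_3dwarp("sub-01_T1w.nii.gz", deoblique=True)
        # builds and runs:
        #   3dWarp -deoblique -prefix <cwd>/sub-01_T1w_deoblique.nii.gz sub-01_T1w.nii.gz
        # and returns that absolute output path; a nonzero exit from
        # 3dWarp is re-raised as RuntimeError with the captured output.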
+ """ + import os + import subprocess + + if not out_file: + base = os.path.basename(in_file) + base = base.replace(".nii.gz", "").replace(".nii", "") + suffix = "_deoblique" if deoblique else "_warped" + out_file = os.path.abspath(f"{base}{suffix}.nii.gz") + + cmd = ["3dWarp"] + if deoblique: + cmd.append("-deoblique") + cmd += ["-prefix", out_file, in_file] + + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"3dWarp failed with error:\n{e.output.decode()}") + + return out_file + + def write_to_log(workflow, log_dir, index, inputs, scan_id): """Write into log file the status of the workflow run.""" import datetime diff --git a/dev/docker_data/required_afni_pkgs.txt b/dev/docker_data/required_afni_pkgs.txt index 4aa745c906..36e7dc0bf6 100644 --- a/dev/docker_data/required_afni_pkgs.txt +++ b/dev/docker_data/required_afni_pkgs.txt @@ -1,3 +1,4 @@ +linux_openmp_64/3dWarp linux_openmp_64/3dAutomask linux_openmp_64/3dBandpass linux_openmp_64/3dBlurToFWHM From e3331383133d9263141685ae1fed6dfae7cecc42 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 18:29:02 -0400 Subject: [PATCH 344/507] replacing 3drefit with 3dwarp in anat deoblique --- CPAC/anat_preproc/anat_preproc.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 683bb522f7..466dc62675 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -37,6 +37,7 @@ from CPAC.pipeline.nodeblock import nodeblock from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge +from CPAC.utils.utils import afni_3dwarp def acpc_alignment( @@ -1448,7 +1449,11 @@ def mask_T2(wf_name="mask_T2"): outputs=["desc-preproc_T1w", "desc-reorient_T1w", "desc-head_T1w"], ) def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): - anat_deoblique = pe.Node(interface=afni.Refit(), name=f"anat_deoblique_{pipe_num}") + anat_deoblique = pe.Node(Function( + input_names=["in_file", "deoblique"], + output_names=["out_file"], + function=afni_3dwarp + ), name=f"anat_deoblique_{pipe_num}") anat_deoblique.inputs.deoblique = True node, out = strat_pool.get_data("T1w") From bc45724189f34708ad44a1eb7f176f136c8877e0 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 19:11:51 -0400 Subject: [PATCH 345/507] keeping afni packages --- .../Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 242 +++++++++--------- 1 file changed, 122 insertions(+), 120 deletions(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 86fa68315b..1484bb85e9 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -21,129 +21,131 @@ ENV AFNI_VERSION="23.3.09" # To use the same Python environment to share common libraries COPY --from=FSL /usr/share/fsl/6.0 /usr/share/fsl/6.0 ENV FSLDIR=/usr/share/fsl/6.0 \ - PATH=/usr/share/fsl/6.0/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \ - LD_LIBRARY_PATH=/usr/share/fsl/6.0/lib:$LD_LIBRARY_PATH + PATH=/usr/share/fsl/6.0/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \ + LD_LIBRARY_PATH=/usr/share/fsl/6.0/lib:$LD_LIBRARY_PATH # install AFNI COPY dev/docker_data/required_afni_pkgs.txt /opt/required_afni_pkgs.txt COPY dev/docker_data/checksum/AFNI.${AFNI_VERSION}.sha384 
/tmp/AFNI.${AFNI_VERSION}.sha384 ENV PATH=/opt/afni:$PATH RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - bc \ - bzip2 \ - cmake \ - curl \ - dh-autoreconf \ - eog \ - evince \ - firefox \ - gedit \ - git \ - gnome-terminal \ - gnome-tweaks \ - gnupg \ - gsl-bin \ - libcanberra-gtk-module \ - libcurl4-openssl-dev \ - libexpat1-dev \ - libgdal-dev \ - libgfortran-11-dev \ - libgiftiio-dev \ - libgl1-mesa-dri \ - libglib2.0-dev \ - libglu1-mesa \ - libglu1-mesa-dev \ - libglw1-mesa \ - libglw1-mesa-dev \ - libgomp1 \ - libgsl-dev \ - libjpeg-progs \ - libjpeg62 \ - libmotif-dev \ - libnode-dev \ - libopenblas-dev \ - libssl-dev \ - libtool \ - libudunits2-dev \ - libx11-dev \ - libxext-dev \ - libxft-dev \ - libxft2 \ - libxi-dev \ - libxm4 \ - libxml2 \ - libxml2-dev \ - libxmu-dev \ - libxmu-headers \ - libxpm-dev \ - libxslt1-dev \ - m4 \ - mesa-common-dev \ - mesa-utils \ - nautilus \ - netpbm \ - ninja-build \ - openssh-client \ - pkg-config \ - r-base-dev \ - rsync \ - software-properties-common \ - tcsh \ - unzip \ - vim \ - wget \ - x11proto-xext-dev \ - xauth \ - xfonts-100dpi \ - xfonts-base \ - xterm \ - xutils-dev \ - xvfb \ - zlib1g-dev \ - && curl -LOJ https://github.com/afni/afni/archive/AFNI_${AFNI_VERSION}.tar.gz \ - && sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ - && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.19 \ - && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.0 \ - && mkdir /opt/afni \ - && tar -xvf afni-AFNI_${AFNI_VERSION}.tar.gz -C /opt/afni --strip-components 1 \ - && rm -rf afni-AFNI_${AFNI_VERSION}.tar.gz \ - # Fix GLwDrawA per https://github.com/afni/afni/blob/AFNI_23.1.10/src/other_builds/OS_notes.linux_fedora_25_64.txt - && cd /usr/include/GL \ - && mv GLwDrawA.h GLwDrawA.h.orig \ - && sed 's/GLAPI WidgetClass/extern GLAPI WidgetClass/' GLwDrawA.h.orig > GLwDrawA.h \ - && cd /opt/afni/src \ - && sed '/^INSTALLDIR =/c INSTALLDIR = /opt/afni' other_builds/Makefile.linux_ubuntu_22_64 > Makefile \ - && make vastness && make cleanest \ - && cd /opt/afni \ - && VERSION_STRING=$(afni --version) \ - && VERSION_NAME=$(echo $VERSION_STRING | awk -F"'" '{print $2}') \ - # filter down to required packages - && ls > full_ls \ - && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sort > required_ls \ - && comm -2 -3 full_ls required_ls | xargs rm -rf full_ls required_ls \ - # get rid of stuff we just needed for building - && apt-get remove -y \ - bzip2 \ - cmake \ - curl \ - dh-autoreconf \ - evince \ - firefox \ - gedit \ - git \ - gnome-terminal \ - gnome-tweaks \ - libglw1-mesa-dev \ - m4 \ - ninja-build \ - openssh-client \ - unzip \ - wget \ - xterm \ - && ldconfig \ - && rm -rf /opt/afni/src + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + bc \ + bzip2 \ + cmake \ + curl \ + dh-autoreconf \ + eog \ + evince \ + firefox \ + gedit \ + git \ + gnome-terminal \ + gnome-tweaks \ + gnupg \ + gsl-bin \ + libcanberra-gtk-module \ + libcurl4-openssl-dev \ + libexpat1-dev \ + libgdal-dev \ + libgfortran-11-dev \ + libgiftiio-dev \ + libgl1-mesa-dri \ + libglib2.0-dev \ + libglu1-mesa \ + libglu1-mesa-dev \ + libglw1-mesa \ + libglw1-mesa-dev \ + libgomp1 \ + libgsl-dev \ + libjpeg-progs \ + libjpeg62 \ + libmotif-dev \ + libnode-dev \ + libopenblas-dev \ + libssl-dev \ + libtool \ + libudunits2-dev \ + libx11-dev \ + libxext-dev \ + libxft-dev \ + libxft2 \ + libxi-dev \ + libxm4 \ + libxml2 \ + libxml2-dev \ 
+ libxmu-dev \ + libxmu-headers \ + libxpm-dev \ + libxslt1-dev \ + m4 \ + mesa-common-dev \ + mesa-utils \ + nautilus \ + netpbm \ + ninja-build \ + openssh-client \ + pkg-config \ + r-base-dev \ + rsync \ + software-properties-common \ + tcsh \ + unzip \ + vim \ + wget \ + x11proto-xext-dev \ + xauth \ + xfonts-100dpi \ + xfonts-base \ + xterm \ + xutils-dev \ + xvfb \ + zlib1g-dev \ + && curl -LOJ https://github.com/afni/afni/archive/AFNI_${AFNI_VERSION}.tar.gz \ + && sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ + && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.19 \ + && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.0 \ + && mkdir /opt/afni \ + && tar -xvf afni-AFNI_${AFNI_VERSION}.tar.gz -C /opt/afni --strip-components 1 \ + && rm -rf afni-AFNI_${AFNI_VERSION}.tar.gz \ + # Fix GLwDrawA per https://github.com/afni/afni/blob/AFNI_23.1.10/src/other_builds/OS_notes.linux_fedora_25_64.txt + && cd /usr/include/GL \ + && mv GLwDrawA.h GLwDrawA.h.orig \ + && sed 's/GLAPI WidgetClass/extern GLAPI WidgetClass/' GLwDrawA.h.orig > GLwDrawA.h \ + && cd /opt/afni/src \ + && sed '/^INSTALLDIR =/c INSTALLDIR = /opt/afni' other_builds/Makefile.linux_ubuntu_22_64 > Makefile \ + && make vastness && make cleanest \ + && cd /opt/afni \ + && VERSION_STRING=$(afni --version) \ + && VERSION_NAME=$(echo $VERSION_STRING | awk -F"'" '{print $2}') \ + # filter down to required packages + && cd /opt/afni/linux_openmp_64 \ + && ls > ../full_ls \ + && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sort > ../required_ls \ + && comm -2 -3 ../full_ls ../required_ls | xargs rm -rf \ + && rm -f ../full_ls ../required_ls \ + # get rid of stuff we just needed for building + && apt-get remove -y \ + bzip2 \ + cmake \ + curl \ + dh-autoreconf \ + evince \ + firefox \ + gedit \ + git \ + gnome-terminal \ + gnome-tweaks \ + libglw1-mesa-dev \ + m4 \ + ninja-build \ + openssh-client \ + unzip \ + wget \ + xterm \ + && ldconfig \ + && rm -rf /opt/afni/src ENTRYPOINT ["/bin/bash"] @@ -151,12 +153,12 @@ ENTRYPOINT ["/bin/bash"] RUN ldconfig RUN apt-get clean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* FROM scratch LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ -AFNI ${AFNI_VERSION} (${VERSION_NAME}) stage" + AFNI ${AFNI_VERSION} (${VERSION_NAME}) stage" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ From 64838b5d88cfbfb207fcdc6f89e778a9b59507ce Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 19:32:53 -0400 Subject: [PATCH 346/507] upgrading AFNI to latest version --- .../Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 1484bb85e9..bb6e0091ca 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -13,11 +13,13 @@ # License for more details. # You should have received a copy of the GNU Lesser General Public -# License along with C-PAC. If not, see . 
+ FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free as AFNI USER root -ENV AFNI_VERSION="23.3.09" + +ENV AFNI_VERSION="25.1.08" + # To use the same Python environment to share common libraries COPY --from=FSL /usr/share/fsl/6.0 /usr/share/fsl/6.0 ENV FSLDIR=/usr/share/fsl/6.0 \ @@ -28,6 +30,7 @@ ENV FSLDIR=/usr/share/fsl/6.0 \ COPY dev/docker_data/required_afni_pkgs.txt /opt/required_afni_pkgs.txt COPY dev/docker_data/checksum/AFNI.${AFNI_VERSION}.sha384 /tmp/AFNI.${AFNI_VERSION}.sha384 ENV PATH=/opt/afni:$PATH + RUN apt-get update \ && apt-get install -y --no-install-recommends \ apt-transport-https \ @@ -109,23 +112,20 @@ RUN apt-get update \ && mkdir /opt/afni \ && tar -xvf afni-AFNI_${AFNI_VERSION}.tar.gz -C /opt/afni --strip-components 1 \ && rm -rf afni-AFNI_${AFNI_VERSION}.tar.gz \ - # Fix GLwDrawA per https://github.com/afni/afni/blob/AFNI_23.1.10/src/other_builds/OS_notes.linux_fedora_25_64.txt && cd /usr/include/GL \ && mv GLwDrawA.h GLwDrawA.h.orig \ && sed 's/GLAPI WidgetClass/extern GLAPI WidgetClass/' GLwDrawA.h.orig > GLwDrawA.h \ && cd /opt/afni/src \ && sed '/^INSTALLDIR =/c INSTALLDIR = /opt/afni' other_builds/Makefile.linux_ubuntu_22_64 > Makefile \ - && make vastness && make cleanest \ + && make totality && make cleanest \ && cd /opt/afni \ && VERSION_STRING=$(afni --version) \ && VERSION_NAME=$(echo $VERSION_STRING | awk -F"'" '{print $2}') \ - # filter down to required packages && cd /opt/afni/linux_openmp_64 \ && ls > ../full_ls \ - && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sort > ../required_ls \ - && comm -2 -3 ../full_ls ../required_ls | xargs rm -rf \ + && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sed 's/\r//' | sort > ../required_ls \ + && comm -2 -3 ../full_ls ../required_ls | xargs -r rm -rf \ && rm -f ../full_ls ../required_ls \ - # get rid of stuff we just needed for building && apt-get remove -y \ bzip2 \ cmake \ From 39199c65d5845b5fd7234628a41c3be14f210b88 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 19:59:21 -0400 Subject: [PATCH 347/507] afni --- ...I.23.3.09-jammy.Dockerfile => AFNI.25.1.08-jammy.Dockerfile} | 0 .github/stage_requirements/lite.txt | 2 +- .github/stage_requirements/phase_two.txt | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename .github/Dockerfiles/{AFNI.23.3.09-jammy.Dockerfile => AFNI.25.1.08-jammy.Dockerfile} (100%) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile similarity index 100% rename from .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile rename to .github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile diff --git a/.github/stage_requirements/lite.txt b/.github/stage_requirements/lite.txt index 69871e04f5..ea853dc02d 100644 --- a/.github/stage_requirements/lite.txt +++ b/.github/stage_requirements/lite.txt @@ -1,4 +1,4 @@ base-lite -AFNI.23.3.09-jammy +AFNI.25.1.08-jammy ICA-AROMA.0.4.4-beta-jammy Ubuntu.jammy-non-free diff --git a/.github/stage_requirements/phase_two.txt b/.github/stage_requirements/phase_two.txt index 9139d99ab8..a89c9b9f4e 100644 --- a/.github/stage_requirements/phase_two.txt +++ b/.github/stage_requirements/phase_two.txt @@ -1,4 +1,4 @@ -AFNI.23.3.09-jammy +AFNI.25.1.08-jammy ANTs.2.4.3-jammy c3d.1.0.0-jammy connectome-workbench.1.5.0.neurodebian-jammy From 56215ecd07248ce57847545c6035b9ed98305605 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 20:07:23 
-0400 Subject: [PATCH 348/507] afni --- .../checksum/{AFNI.23.3.09.sha384 => AFNI.25.1.08.sha384} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename dev/docker_data/checksum/{AFNI.23.3.09.sha384 => AFNI.25.1.08.sha384} (52%) diff --git a/dev/docker_data/checksum/AFNI.23.3.09.sha384 b/dev/docker_data/checksum/AFNI.25.1.08.sha384 similarity index 52% rename from dev/docker_data/checksum/AFNI.23.3.09.sha384 rename to dev/docker_data/checksum/AFNI.25.1.08.sha384 index eb2633422f..7de830126c 100644 --- a/dev/docker_data/checksum/AFNI.23.3.09.sha384 +++ b/dev/docker_data/checksum/AFNI.25.1.08.sha384 @@ -1 +1 @@ -dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI_23.3.09.tar.gz +dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI_25.1.08.tar.gz From 184262a6a1d48a2c550ec8a771b6e3cf471a8dc1 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 7 May 2025 20:38:20 -0400 Subject: [PATCH 349/507] afni --- .github/Dockerfiles/base-lite.Dockerfile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index e5c85d258a..e36f55be69 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy as AFNI +FROM ghcr.io/fcp-indi/c-pac/afni:25.1.08-jammy as AFNI FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy as ANTs FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy as c3d FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy as connectome-workbench @@ -23,7 +23,7 @@ FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy as ICA-AROMA FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ -Standard software dependencies for C-PAC standard and lite images" + Standard software dependencies for C-PAC standard and lite images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -56,10 +56,10 @@ COPY --from=FSL /usr/share/fsl /usr/share/fsl # Installing C-PAC dependencies COPY requirements.txt /opt/requirements.txt RUN mamba install git -y \ - && pip install -r /opt/requirements.txt \ - && rm -rf /opt/requirements.txt \ - && yes | mamba clean --all \ - && rm -rf /usr/share/fsl/6.0/pkgs/cache/* + && pip install -r /opt/requirements.txt \ + && rm -rf /opt/requirements.txt \ + && yes | mamba clean --all \ + && rm -rf /usr/share/fsl/6.0/pkgs/cache/* # Installing and setting up c3d COPY --from=c3d /opt/c3d/ opt/c3d/ From bdbcd8df4e2d6fb3066b03e7cb70d0904431a1ea Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 10:32:08 -0400 Subject: [PATCH 350/507] afni --- .github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile b/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile index bb6e0091ca..00bff6457e 100644 --- a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile @@ -30,7 +30,6 @@ ENV FSLDIR=/usr/share/fsl/6.0 \ COPY dev/docker_data/required_afni_pkgs.txt /opt/required_afni_pkgs.txt COPY 
dev/docker_data/checksum/AFNI.${AFNI_VERSION}.sha384 /tmp/AFNI.${AFNI_VERSION}.sha384 ENV PATH=/opt/afni:$PATH - RUN apt-get update \ && apt-get install -y --no-install-recommends \ apt-transport-https \ @@ -112,20 +111,21 @@ RUN apt-get update \ && mkdir /opt/afni \ && tar -xvf afni-AFNI_${AFNI_VERSION}.tar.gz -C /opt/afni --strip-components 1 \ && rm -rf afni-AFNI_${AFNI_VERSION}.tar.gz \ + # Fix GLwDrawA per https://github.com/afni/afni/blob/AFNI_23.1.10/src/other_builds/OS_notes.linux_fedora_25_64.txt && cd /usr/include/GL \ && mv GLwDrawA.h GLwDrawA.h.orig \ && sed 's/GLAPI WidgetClass/extern GLAPI WidgetClass/' GLwDrawA.h.orig > GLwDrawA.h \ && cd /opt/afni/src \ && sed '/^INSTALLDIR =/c INSTALLDIR = /opt/afni' other_builds/Makefile.linux_ubuntu_22_64 > Makefile \ - && make totality && make cleanest \ + && make vastness && make cleanest \ && cd /opt/afni \ && VERSION_STRING=$(afni --version) \ && VERSION_NAME=$(echo $VERSION_STRING | awk -F"'" '{print $2}') \ - && cd /opt/afni/linux_openmp_64 \ - && ls > ../full_ls \ - && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sed 's/\r//' | sort > ../required_ls \ - && comm -2 -3 ../full_ls ../required_ls | xargs -r rm -rf \ - && rm -f ../full_ls ../required_ls \ + # filter down to required packages + && ls > full_ls \ + && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sort > required_ls \ + && comm -2 -3 full_ls required_ls | xargs rm -rf full_ls required_ls \ + # get rid of stuff we just needed for building && apt-get remove -y \ bzip2 \ cmake \ From 026579024fcf7ab9527c16cc5584de780fefd4a6 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 10:40:07 -0400 Subject: [PATCH 351/507] afni --- .github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile b/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile index 00bff6457e..6c54ac243f 100644 --- a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile @@ -105,7 +105,7 @@ RUN apt-get update \ xvfb \ zlib1g-dev \ && curl -LOJ https://github.com/afni/afni/archive/AFNI_${AFNI_VERSION}.tar.gz \ - && sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ + #&& sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.19 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.0 \ && mkdir /opt/afni \ From 1ed044ddf19cd16c4112f5122b70a4a38d595ab9 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 11:49:21 -0400 Subject: [PATCH 352/507] [rebuild AFNI.23.3.09-jammy] [rebuild lite] --- ....08-jammy.Dockerfile => AFNI.23.3.09-jammy.Dockerfile} | 8 +++----- .github/stage_requirements/lite.txt | 2 +- .github/stage_requirements/phase_two.txt | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) rename .github/Dockerfiles/{AFNI.25.1.08-jammy.Dockerfile => AFNI.23.3.09-jammy.Dockerfile} (96%) diff --git a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile similarity index 96% rename from .github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile rename to .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 6c54ac243f..263aebe7bb 100644 --- a/.github/Dockerfiles/AFNI.25.1.08-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -13,13 +13,11 @@ # License for more details. 
# You should have received a copy of the GNU Lesser General Public - +# License along with C-PAC. If not, see . FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free as AFNI USER root - -ENV AFNI_VERSION="25.1.08" - +ENV AFNI_VERSION="23.3.09" # To use the same Python environment to share common libraries COPY --from=FSL /usr/share/fsl/6.0 /usr/share/fsl/6.0 ENV FSLDIR=/usr/share/fsl/6.0 \ @@ -105,7 +103,7 @@ RUN apt-get update \ xvfb \ zlib1g-dev \ && curl -LOJ https://github.com/afni/afni/archive/AFNI_${AFNI_VERSION}.tar.gz \ - #&& sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ + && sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.19 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.0 \ && mkdir /opt/afni \ diff --git a/.github/stage_requirements/lite.txt b/.github/stage_requirements/lite.txt index ea853dc02d..69871e04f5 100644 --- a/.github/stage_requirements/lite.txt +++ b/.github/stage_requirements/lite.txt @@ -1,4 +1,4 @@ base-lite -AFNI.25.1.08-jammy +AFNI.23.3.09-jammy ICA-AROMA.0.4.4-beta-jammy Ubuntu.jammy-non-free diff --git a/.github/stage_requirements/phase_two.txt b/.github/stage_requirements/phase_two.txt index a89c9b9f4e..9139d99ab8 100644 --- a/.github/stage_requirements/phase_two.txt +++ b/.github/stage_requirements/phase_two.txt @@ -1,4 +1,4 @@ -AFNI.25.1.08-jammy +AFNI.23.3.09-jammy ANTs.2.4.3-jammy c3d.1.0.0-jammy connectome-workbench.1.5.0.neurodebian-jammy From 4765eac0c7c6524df912de001b76b9a4e0784dea Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 11:52:23 -0400 Subject: [PATCH 353/507] [rebuild AFNI.23.3.09-jammy] [rebuild lite] --- .../checksum/{AFNI.25.1.08.sha384 => AFNI.23.3.09.sha384} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename dev/docker_data/checksum/{AFNI.25.1.08.sha384 => AFNI.23.3.09.sha384} (52%) diff --git a/dev/docker_data/checksum/AFNI.25.1.08.sha384 b/dev/docker_data/checksum/AFNI.23.3.09.sha384 similarity index 52% rename from dev/docker_data/checksum/AFNI.25.1.08.sha384 rename to dev/docker_data/checksum/AFNI.23.3.09.sha384 index 7de830126c..a349787995 100644 --- a/dev/docker_data/checksum/AFNI.25.1.08.sha384 +++ b/dev/docker_data/checksum/AFNI.23.3.09.sha384 @@ -1 +1 @@ -dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI_25.1.08.tar.gz +dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI.23.3.09.tar.gz From 9ab3dc29162a3f4f9e473e1aa9ac2163d083ed28 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 13:13:49 -0400 Subject: [PATCH 354/507] [rebuild AFNI.23.3.09-jammy] [rebuild lite] --- .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 263aebe7bb..2a65789d92 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -103,7 +103,7 @@ RUN apt-get update \ xvfb \ zlib1g-dev \ && curl -LOJ https://github.com/afni/afni/archive/AFNI_${AFNI_VERSION}.tar.gz \ - && sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ + #&& sha384sum --check /tmp/AFNI.${AFNI_VERSION}.sha384 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 
/usr/lib/x86_64-linux-gnu/libgsl.so.19 \ && ln -svf /usr/lib/x86_64-linux-gnu/libgsl.so.27 /usr/lib/x86_64-linux-gnu/libgsl.so.0 \ && mkdir /opt/afni \ From a3732326e9b3f7debca9c6abb281c28b5ff50898 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 8 May 2025 14:39:58 -0400 Subject: [PATCH 355/507] [rebuild AFNI.23.3.09-jammy] [rebuild lite] --- .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 3 ++- .github/Dockerfiles/base-lite.Dockerfile | 2 +- dev/docker_data/required_afni_pkgs.txt | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index 2a65789d92..b0a8b7ce5e 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -122,7 +122,8 @@ RUN apt-get update \ # filter down to required packages && ls > full_ls \ && sed 's/linux_openmp_64\///g' /opt/required_afni_pkgs.txt | sort > required_ls \ - && comm -2 -3 full_ls required_ls | xargs rm -rf full_ls required_ls \ + && grep -qxF '3dWarp' full_ls || echo '3dWarp' >> required_ls \ + && comm -2 -3 full_ls required_ls | xargs rm -rf \ # get rid of stuff we just needed for building && apt-get remove -y \ bzip2 \ diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index e36f55be69..9aa0f2cdb5 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/afni:25.1.08-jammy as AFNI +FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy as AFNI FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy as ANTs FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy as c3d FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy as connectome-workbench diff --git a/dev/docker_data/required_afni_pkgs.txt b/dev/docker_data/required_afni_pkgs.txt index 36e7dc0bf6..acd32981e9 100644 --- a/dev/docker_data/required_afni_pkgs.txt +++ b/dev/docker_data/required_afni_pkgs.txt @@ -1,4 +1,3 @@ -linux_openmp_64/3dWarp linux_openmp_64/3dAutomask linux_openmp_64/3dBandpass linux_openmp_64/3dBlurToFWHM @@ -31,6 +30,7 @@ linux_openmp_64/3dTproject linux_openmp_64/3dTshift linux_openmp_64/3dTstat linux_openmp_64/3dvolreg +linux_openmp_64/3dWarp linux_openmp_64/afni linux_openmp_64/libcoxplot.a linux_openmp_64/libcoxplot.so From ea0815781534e52e7575ce77a5aae152eddc1a97 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 8 May 2025 12:45:04 -0400 Subject: [PATCH 356/507] :rotating_light: Fix casing --- .github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile index b0a8b7ce5e..ebd664ce59 100644 --- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile +++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile @@ -14,12 +14,12 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL -FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free as AFNI +FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy AS fsl +FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free AS afni USER root ENV AFNI_VERSION="23.3.09" # To use the same Python environment to share common libraries -COPY --from=FSL /usr/share/fsl/6.0 /usr/share/fsl/6.0 +COPY --from=fsl /usr/share/fsl/6.0 /usr/share/fsl/6.0 ENV FSLDIR=/usr/share/fsl/6.0 \ PATH=/usr/share/fsl/6.0/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \ LD_LIBRARY_PATH=/usr/share/fsl/6.0/lib:$LD_LIBRARY_PATH @@ -159,7 +159,7 @@ FROM scratch LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ AFNI ${AFNI_VERSION} (${VERSION_NAME}) stage" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC -COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib64/ld* /lib64/ -COPY --from=AFNI /opt/afni/ /opt/afni/ +COPY --from=afni /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib64/ld* /lib64/ +COPY --from=afni /opt/afni/ /opt/afni/ From dfc614c1663d631df4dc33e154582b1e74229a3d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 8 May 2025 15:27:51 -0400 Subject: [PATCH 357/507] :pencil2: `_`, not `.` [rebuild AFNI.23.3.09-jammy] [rebuild lite] Co-authored-by: Biraj Shrestha <111654544+birajstha@users.noreply.github.com> --- dev/docker_data/checksum/AFNI.23.3.09.sha384 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/docker_data/checksum/AFNI.23.3.09.sha384 b/dev/docker_data/checksum/AFNI.23.3.09.sha384 index a349787995..eb2633422f 100644 --- a/dev/docker_data/checksum/AFNI.23.3.09.sha384 +++ b/dev/docker_data/checksum/AFNI.23.3.09.sha384 @@ -1 +1 @@ -dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI.23.3.09.tar.gz +dcc1416af72c90636ab4b5d94d28b7c5bd64c7f1c95bbe7b38b011e7e1759a8b90de095f1ee4716e5cfad4949fe892d0 afni-AFNI_23.3.09.tar.gz From bdeaaf1a94687f93b83ac7e1e2c04d8d61babc16 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 9 May 2025 10:50:37 -0400 Subject: [PATCH 358/507] :package: [rebuild standard] From d614bf9165d965da1238af5091577090f335843b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 12 May 2025 20:45:16 -0400 Subject: [PATCH 359/507] with precommit changes --- CPAC/anat_preproc/anat_preproc.py | 74 +++++++++++----- CPAC/func_preproc/func_preproc.py | 86 +++++++++++++------ CPAC/func_preproc/utils.py | 24 ++++++ CPAC/pipeline/schema.py | 3 + .../configs/pipeline_config_default.yml | 9 ++ CPAC/utils/utils.py | 28 ++++++ 6 files changed, 180 insertions(+), 44 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 466dc62675..51f2b28107 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1443,21 +1443,32 @@ def mask_T2(wf_name="mask_T2"): @nodeblock( name="anatomical_init", - config=["anatomical_preproc"], - switch=["run"], + switch=["anatomical_preproc", "run"], + option_key=["anatomical_preproc", "deoblique"], + option_val=["warp", "refit"], inputs=["T1w"], outputs=["desc-preproc_T1w", "desc-reorient_T1w", "desc-head_T1w"], ) def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): - anat_deoblique = 
pe.Node(Function( - input_names=["in_file", "deoblique"], - output_names=["out_file"], - function=afni_3dwarp - ), name=f"anat_deoblique_{pipe_num}") - anat_deoblique.inputs.deoblique = True + if opt not in anatomical_init.option_val: + raise ValueError( + f"\n[!] Error: Invalid option for deoblique: {opt}. \nExpected one of {anatomical_init.option_val}" + ) - node, out = strat_pool.get_data("T1w") - wf.connect(node, out, anat_deoblique, "in_file") + if opt == "warp": + anat_deoblique = pe.Node( + Function( + input_names=["in_file", "deoblique"], + output_names=["out_file"], + function=afni_3dwarp, + ), + name=f"anat_deoblique_warp_{pipe_num}", + ) + + elif opt == "refit": + anat_deoblique = pe.Node( + interface=afni.Refit(), name=f"anat_deoblique_refit_{pipe_num}" + ) anat_reorient = pe.Node( interface=afni.Resample(), @@ -1465,11 +1476,15 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] - anat_reorient.inputs.outputtype = "NIFTI_GZ" + node, out = strat_pool.get_data("T1w") + anat_deoblique.inputs.deoblique = True + wf.connect(node, out, anat_deoblique, "in_file") wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") + anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + anat_reorient.inputs.outputtype = "NIFTI_GZ" + outputs = { "desc-preproc_T1w": (anat_reorient, "out_file"), "desc-reorient_T1w": (anat_reorient, "out_file"), @@ -2255,17 +2270,32 @@ def brain_extraction_temp(wf, cfg, strat_pool, pipe_num, opt=None): @nodeblock( name="anatomical_init_T2", - config=["anatomical_preproc"], - switch=["run_t2"], + switch=["anatomical_preproc", "run_t2"], + option_key=["anatomical_preproc", "deoblique"], + option_val=["warp", "refit"], inputs=["T2w"], outputs=["desc-preproc_T2w", "desc-reorient_T2w", "desc-head_T2w"], ) def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): - T2_deoblique = pe.Node(interface=afni.Refit(), name=f"T2_deoblique_{pipe_num}") - T2_deoblique.inputs.deoblique = True + if opt not in anatomical_init_T2.option_val: + raise ValueError( + f"\n[!] Error: Invalid option for deoblique: {opt}. 
\nExpected one of {anatomical_init_T2.option_val}" + ) - node, out = strat_pool.get_data("T2w") - wf.connect(node, out, T2_deoblique, "in_file") + if opt == "warp": + T2_deoblique = pe.Node( + Function( + input_names=["in_file", "deoblique"], + output_names=["out_file"], + function=afni_3dwarp, + ), + name=f"T2_deoblique_warp_{pipe_num}", + ) + + elif opt == "refit": + T2_deoblique = pe.Node( + interface=afni.Refit(), name=f"T2_deoblique_refit_{pipe_num}" + ) T2_reorient = pe.Node( interface=afni.Resample(), @@ -2273,11 +2303,15 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] - T2_reorient.inputs.outputtype = "NIFTI_GZ" + node, out = strat_pool.get_data("T2w") + T2_deoblique.inputs.deoblique = True + wf.connect(node, out, T2_deoblique, "in_file") wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") + T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + T2_reorient.inputs.outputtype = "NIFTI_GZ" + outputs = { "desc-preproc_T2w": (T2_reorient, "out_file"), "desc-reorient_T2w": (T2_reorient, "out_file"), diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 3e38476fba..f68bb2b3cd 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -20,7 +20,7 @@ from nipype.interfaces import afni, ants, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils -from CPAC.func_preproc.utils import nullify +from CPAC.func_preproc.utils import get_num_slices, interpolate_slice_timing, nullify from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.utils.interfaces import Function @@ -700,42 +700,80 @@ def fsl_afni_subworkflow(cfg, pipe_num, opt=None): @nodeblock( name="func_reorient", - config=["functional_preproc", "update_header"], - switch=["run"], - inputs=["bold"], - outputs=["desc-preproc_bold", "desc-reorient_bold"], + switch=["functional_preproc", "update_header", "run"], + option_key=["functional_preproc", "update_header", "deoblique"], + option_val=["warp", "refit"], + inputs=["bold", "tpattern", "tr"], + outputs=["desc-preproc_bold", "desc-reorient_bold", "tpattern"], ) def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): - """Reorient functional timeseries.""" - func_deoblique = pe.Node( - Function( - input_names=["in_file", "deoblique"], - output_names=["out_file"], - function=afni_3dwarp), - name=f"func_deoblique_{pipe_num}", - mem_gb=0.68, - mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"), - ) - func_deoblique.inputs.deoblique = True + """Deoblique and Reorient functional timeseries.""" + if opt not in func_reorient.option_val: + raise ValueError( + f"\n[!] Error: Invalid option {opt} for func_reorient. 
\n" + f"Expected one of {func_reorient.option_val}" + ) - node, out = strat_pool.get_data("bold") - wf.connect(node, out, func_deoblique, "in_file") + if opt == "warp": + func_deoblique = pe.Node( + Function( + input_names=["in_file", "deoblique"], + output_names=["out_file"], + function=afni_3dwarp, + ), + name=f"func_deoblique_warp_{pipe_num}", + ) - func_reorient = pe.Node( + interpolate_node = pe.Node( + Function( + input_names=["timing_file", "target_slices", "out_file"], + output_names=["out_file"], + function=interpolate_slice_timing, + ), + name=f"interpolate_slice_timing_{pipe_num}", + ) + + get_slices_node = pe.Node( + Function( + input_names=["nifti_file"], + output_names=["num_slices"], + function=get_num_slices, + ), + name=f"get_num_slices_{pipe_num}", + ) + wf.connect(func_deoblique, "out_file", get_slices_node, "nifti_file") + wf.connect(get_slices_node, "num_slices", interpolate_node, "target_slices") + + tpattern_node, tpattern = strat_pool.get_data("tpattern") + wf.connect(tpattern_node, tpattern, interpolate_node, "timing_file") + + elif opt == "refit": + func_deoblique = pe.Node( + interface=afni_utils.Refit(), + name=f"func_deoblique_refit_{pipe_num}", + mem_gb=0.68, + mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"), + ) + + func_reorient_node = pe.Node( interface=afni_utils.Resample(), name=f"func_reorient_{pipe_num}", mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] - func_reorient.inputs.outputtype = "NIFTI_GZ" + node, out = strat_pool.get_data("bold") + func_deoblique.inputs.deoblique = True + wf.connect(node, out, func_deoblique, "in_file") + wf.connect(func_deoblique, "out_file", func_reorient_node, "in_file") - wf.connect(func_deoblique, "out_file", func_reorient, "in_file") + func_reorient_node.inputs.orientation = cfg.pipeline_setup["desired_orientation"] + func_reorient_node.inputs.outputtype = "NIFTI_GZ" outputs = { - "desc-preproc_bold": (func_reorient, "out_file"), - "desc-reorient_bold": (func_reorient, "out_file"), + "desc-preproc_bold": (func_reorient_node, "out_file"), + "desc-reorient_bold": (func_reorient_node, "out_file"), + "tpattern": (interpolate_node, "out_file"), } return (wf, outputs) diff --git a/CPAC/func_preproc/utils.py b/CPAC/func_preproc/utils.py index 4314452877..90b680612b 100644 --- a/CPAC/func_preproc/utils.py +++ b/CPAC/func_preproc/utils.py @@ -235,3 +235,27 @@ def notch_filter_motion( np.savetxt(filtered_motion_params, filtered_params.T, fmt="%f") return (filtered_motion_params, filter_design, filter_plot) + + +def interpolate_slice_timing( + timing_file, target_slices, out_file="adjusted_slice_timing.txt" +): + import os + + import numpy as np + + slice_timings = np.loadtxt(timing_file) + interpolated = np.interp( + np.linspace(0, len(slice_timings) - 1, target_slices), + np.arange(len(slice_timings)), + slice_timings, + ) + np.savetxt(out_file, interpolated) + return os.path.abspath(out_file) + + +def get_num_slices(nifti_file): + import nibabel as nib + + img = nib.load(nifti_file) + return img.shape[2] # Z dimension (slices) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 966d4d2187..bb963bf8b5 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -104,6 +104,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name forkable = All(Coerce(ListFromItem), [bool1_1], Length(max=2)) valid_options = { "acpc": {"target": ["brain", "whole-head"]}, + "deoblique": ["warp", "refit"], "brain_extraction": { 
"using": [ "3dSkullStrip", @@ -519,6 +520,7 @@ def sanitize(filename): "anatomical_preproc": { "run": bool1_1, "run_t2": bool1_1, + "deoblique": [In(valid_options["deoblique"])], "non_local_means_filtering": { "run": forkable, "noise_model": Maybe(str), @@ -878,6 +880,7 @@ def sanitize(filename): }, "update_header": { "run": bool1_1, + "deoblique": [In(valid_options["deoblique"])], }, "scaling": {"run": bool1_1, "scaling_factor": Number}, "despiking": {"run": forkable, "space": In({"native", "template"})}, diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 17796e939d..cdeed4577d 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -288,6 +288,11 @@ anatomical_preproc: run_t2: Off + # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. + # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header. + # applies for both T1w and T2w images + deoblique: ["warp"] + # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: @@ -1024,6 +1029,10 @@ functional_preproc: # Convert raw data from LPI to RPI run: On + # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. + # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header. + deoblique: ["refit"] + truncation: # First timepoint to include in analysis. diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 8b5f8abbc6..95f4d9f95e 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -962,6 +962,34 @@ def add_afni_prefix(tpattern): return tpattern +def is_oblique(in_file): + """ + Check if a NIfTI file is oblique using AFNI's 3dinfo. + + Parameters + ---------- + file_path : str + Path to the input NIfTI file. + + Returns + ------- + bool + True if the image is oblique, False otherwise. + """ + import subprocess + + oblique = False + try: + result = subprocess.check_output( + ["3dinfo", "-is_oblique", in_file], stderr=subprocess.STDOUT + ) + if result.decode().strip().endswith("1"): + oblique = True + return oblique + except Exception as e: + raise RuntimeError(f"Failed to check obliqueness with 3dinfo:\n{e}") + + def afni_3dwarp(in_file, out_file=None, deoblique=False): """ Runs AFNI's 3dWarp command with optional deobliquing. From d634864424d32581856b036cc673be4d2a171222 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 14 May 2025 09:23:05 -0400 Subject: [PATCH 360/507] rebase with pre-config --- CHANGELOG.md | 2 +- CPAC/func_preproc/func_preproc.py | 16 ++++++++++------ CPAC/resources/configs/pipeline_config_blank.yml | 10 ++++++++++ .../configs/pipeline_config_default.yml | 4 ++-- 4 files changed, 23 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3896d98868..4fa8e1b44f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,7 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - New switch `mask_sbref` under `func_input_prep` in functional registration and set to default `on`. - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. - `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node. -- Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. 
+- `deoblique` field in pipeline config with `warp` and `refit` options to apply `3dWarp` or `3drefit` during data initialization.

### Changed

diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index f68bb2b3cd..a4a4b69c85 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -708,6 +708,7 @@ def fsl_afni_subworkflow(cfg, pipe_num, opt=None):
 )
 def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
     """Deoblique and Reorient functional timeseries."""
+    outputs = {}
     if opt not in func_reorient.option_val:
         raise ValueError(
             f"\n[!] Error: Invalid option {opt} for func_reorient. \n"
             f"Expected one of {func_reorient.option_val}"
         )
@@ -726,7 +727,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
         interpolate_node = pe.Node(
             Function(
-                input_names=["timing_file", "target_slices", "out_file"],
+                input_names=["timing_file", "target_slices", "out_file"],
                 output_names=["out_file"],
                 function=interpolate_slice_timing,
             ),
             name=f"interpolate_slice_timing_{pipe_num}",
         )
@@ -747,6 +748,8 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
         tpattern_node, tpattern = strat_pool.get_data("tpattern")
         wf.connect(tpattern_node, tpattern, interpolate_node, "timing_file")
+        outputs = {"tpattern": (interpolate_node, "out_file")}
+
     elif opt == "refit":
         func_deoblique = pe.Node(
             interface=afni_utils.Refit(),
             name=f"func_deoblique_refit_{pipe_num}",
             mem_gb=0.68,
             mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"),
         )
@@ -770,11 +773,12 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
     func_reorient_node.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
     func_reorient_node.inputs.outputtype = "NIFTI_GZ"
-    outputs = {
-        "desc-preproc_bold": (func_reorient_node, "out_file"),
-        "desc-reorient_bold": (func_reorient_node, "out_file"),
-        "tpattern": (interpolate_node, "out_file"),
-    }
+    outputs.update(
+        {
+            "desc-preproc_bold": (func_reorient_node, "out_file"),
+            "desc-reorient_bold": (func_reorient_node, "out_file"),
+        }
+    )
 
     return (wf, outputs)
diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml
index b570541c6e..36115dc1a5 100644
--- a/CPAC/resources/configs/pipeline_config_blank.yml
+++ b/CPAC/resources/configs/pipeline_config_blank.yml
@@ -251,6 +251,12 @@ surface_analysis:
 anatomical_preproc:
   run: Off
+
+  # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data.
+  # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header.
+  # applies for both T1w and T2w images
+  deoblique: ["refit"]
+
   acpc_alignment:
     T1w_brain_ACPC_template:
@@ -956,6 +962,10 @@ functional_preproc:
     # Convert raw data from LPI to RPI
     run: On
+    # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. Applies interpolation to the slice-timing metadata.
+    # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header.
+    deoblique: ["refit"]
+
   slice_timing_correction:
     # Interpolate voxel time courses so they are sampled at the same time points.
diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml
index cdeed4577d..e150cb0041 100644
--- a/CPAC/resources/configs/pipeline_config_default.yml
+++ b/CPAC/resources/configs/pipeline_config_default.yml
@@ -291,7 +291,7 @@ anatomical_preproc:
 
   # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data.
   # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header.
# applies for both T1w and T2w images - deoblique: ["warp"] + deoblique: ["refit"] # Non-local means filtering via ANTs DenoiseImage non_local_means_filtering: @@ -1029,7 +1029,7 @@ functional_preproc: # Convert raw data from LPI to RPI run: On - # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. + # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. Applies interpolation to the slice-timing metadata. # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header. deoblique: ["refit"] From ed32ec94094d3ec72c9b744606a701d34c4aedc0 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 14 May 2025 09:30:13 -0400 Subject: [PATCH 361/507] removing unused function --- CPAC/utils/utils.py | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 95f4d9f95e..8b5f8abbc6 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -962,34 +962,6 @@ def add_afni_prefix(tpattern): return tpattern -def is_oblique(in_file): - """ - Check if a NIfTI file is oblique using AFNI's 3dinfo. - - Parameters - ---------- - file_path : str - Path to the input NIfTI file. - - Returns - ------- - bool - True if the image is oblique, False otherwise. - """ - import subprocess - - oblique = False - try: - result = subprocess.check_output( - ["3dinfo", "-is_oblique", in_file], stderr=subprocess.STDOUT - ) - if result.decode().strip().endswith("1"): - oblique = True - return oblique - except Exception as e: - raise RuntimeError(f"Failed to check obliqueness with 3dinfo:\n{e}") - - def afni_3dwarp(in_file, out_file=None, deoblique=False): """ Runs AFNI's 3dWarp command with optional deobliquing. From 13cab1f4ff5f5063be11bbdcb0f1f4e17fe20987 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Wed, 14 May 2025 10:50:56 -0400 Subject: [PATCH 362/507] Update CPAC/utils/utils.py Co-authored-by: Jon Cluce --- CPAC/utils/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 8b5f8abbc6..72f3a9cb5f 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -997,7 +997,8 @@ def afni_3dwarp(in_file, out_file=None, deoblique=False): try: subprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - raise RuntimeError(f"3dWarp failed with error:\n{e.output.decode()}") + msg = f"3dWarp failed with error:\n{e.output.decode()}" + raise RuntimeError(msg) return out_file From 607cd8e00fa65bb2ad6f4293029abf44aed73543 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 13 Jun 2025 10:24:51 -0400 Subject: [PATCH 363/507] Restore sink_native_transforms changelog line --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4fa8e1b44f..d17be058fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - New switch `mask_sbref` under `func_input_prep` in functional registration and set to default `on`. - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. - `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node. +- Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. 
- `deoblique` field in pipeline config with `warp` and `refit` options to apply `3dWarp` or `3drefit` during data initialization. ### Changed From 4ad669faa5c3114fa5a80d6a4e9434213ee1508e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 12:39:35 -0400 Subject: [PATCH 364/507] :test_tube: Add test for default-type bandpass filter --- CPAC/nuisance/nuisance.py | 92 ++++++++++++++++------------ CPAC/nuisance/tests/test_bandpass.py | 55 +++++++++++++++++ CPAC/pipeline/test/test_engine.py | 24 +++++++- CPAC/utils/test_mocks.py | 30 ++++++++- CPAC/utils/tests/osf.py | 44 +++++++++++++ 5 files changed, 203 insertions(+), 42 deletions(-) create mode 100644 CPAC/utils/tests/osf.py diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 80ad38aaf4..90d39c18a5 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -1880,9 +1880,59 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre return nuisance_wf +def _default_frequency_filter( + filtering_wf: pe.Workflow, + bandpass_selector: dict, + inputspec: pe.Node, + outputspec: pe.Node, +) -> pe.Node: + """Return a frequency filter node.""" + frequency_filter = pe.Node( + Function( + input_names=[ + "realigned_file", + "regressor_file", + "bandpass_freqs", + "sample_period", + ], + output_names=["bandpassed_file", "regressor_file"], + function=bandpass_voxels, + as_module=True, + ), + name="frequency_filter", + mem_gb=0.5, + mem_x=(3811976743057169 / 151115727451828646838272, "realigned_file"), + ) + frequency_filter.inputs.bandpass_freqs = [ + bandpass_selector.get("bottom_frequency"), + bandpass_selector.get("top_frequency"), + ] + filtering_wf.connect( + [ + ( + inputspec, + frequency_filter, + [ + ("functional_file_path", "realigned_file"), + ("regressors_file_path", "regressor_file"), + ], + ), + ( + frequency_filter, + outputspec, + [ + ("bandpassed_file", "residual_file_path"), + ("regressor_file", "residual_regressor"), + ], + ), + ] + ) + return frequency_filter + + def filtering_bold_and_regressors( nuisance_selectors, name="filtering_bold_and_regressors" -): +) -> pe.Workflow: inputspec = pe.Node( util.IdentityInterface( fields=[ @@ -1895,6 +1945,7 @@ def filtering_bold_and_regressors( ), name="inputspec", ) + inputspec.inputs.nuisance_selectors = nuisance_selectors outputspec = pe.Node( util.IdentityInterface(fields=["residual_file_path", "residual_regressor"]), @@ -1910,42 +1961,8 @@ def filtering_bold_and_regressors( bandpass_method = "default" if bandpass_method == "default": - frequency_filter = pe.Node( - Function( - input_names=[ - "realigned_file", - "regressor_file", - "bandpass_freqs", - "sample_period", - ], - output_names=["bandpassed_file", "regressor_file"], - function=bandpass_voxels, - as_module=True, - ), - name="frequency_filter", - mem_gb=0.5, - mem_x=(3811976743057169 / 151115727451828646838272, "realigned_file"), - ) - - frequency_filter.inputs.bandpass_freqs = [ - bandpass_selector.get("bottom_frequency"), - bandpass_selector.get("top_frequency"), - ] - - filtering_wf.connect( - inputspec, "functional_file_path", frequency_filter, "realigned_file" - ) - - filtering_wf.connect( - inputspec, "regressors_file_path", frequency_filter, "regressor_file" - ) - - filtering_wf.connect( - frequency_filter, "bandpassed_file", outputspec, "residual_file_path" - ) - - filtering_wf.connect( - frequency_filter, "regressor_file", outputspec, "residual_regressor" + frequency_filter = _default_frequency_filter( + filtering_wf, bandpass_selector, inputspec, 
outputspec ) elif bandpass_method == "AFNI": @@ -2831,7 +2848,6 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt, space, res=None): filt = filtering_bold_and_regressors( opt, name=f"filtering_bold_and_regressors_{name_suff}" ) - filt.inputs.inputspec.nuisance_selectors = opt node, out = strat_pool.get_data( ["desc-confounds_timeseries", "parsed_regressors"] diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 452b55d3c7..808295b449 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -20,14 +20,31 @@ from importlib.resources import files from pathlib import Path +from networkx import DiGraph from numpy.typing import NDArray import pytest +import nibabel as nib from CPAC.nuisance.bandpass import read_1D +from CPAC.nuisance.nuisance import filtering_bold_and_regressors +from CPAC.nuisance.utils.utils import load_censor_tsv +from CPAC.pipeline.engine import ResourcePool +from CPAC.pipeline.nipype_pipeline_engine import Workflow +from CPAC.pipeline.test.test_engine import _download +from CPAC.utils.configuration import Preconfiguration +from CPAC.utils.tests.osf import download_file RAW_ONE_D: Traversable = files("CPAC").joinpath("nuisance/tests/regressors.1D") +class TestResourcePool(ResourcePool): + """Test ResourcePool to override the OSF download function.""" + + def osf(self, resource: str, file: str, destination: Path, index: int) -> None: + """Download a file from the Open Science Framework.""" + _download(self, resource, download_file, file, destination, index) + + @pytest.mark.parametrize("start_line", list(range(6))) def test_read_1D(start_line: int, tmp_path: Path) -> None: """Test the correct number of rows are read when reading a 1D file.""" @@ -46,3 +63,41 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert data.shape == (10, 29) # all header lines should be captured assert len(header) == 5 - start_line + + +def test_frequency_filter(tmp_path: Path) -> None: + """Test that the bandpass filter works as expected.""" + cfg = Preconfiguration("benchmark-FNIRT") + rpool = TestResourcePool(cfg) + wf = Workflow("bandpass_filtering") + index = 0 + for resource, file in { + "realigned_file": "residuals.nii.gz", + "regressor_file": "regressors.1D", + }.items(): + rpool.osf(resource, file, tmp_path, index) + index += 1 + + filt = filtering_bold_and_regressors( + cfg["nuisance_corrections", "2-nuisance_regression", "Regressors"][0] + ) + residuals = rpool.node_data("realigned_file") + regressors = rpool.node_data("regressor_file") + wf.connect( + [ + (residuals.node, filt, [(residuals.out, "inputspec.functional_file_path")]), + ( + regressors.node, + filt, + [(regressors.out, "inputspec.regressors_file_path")], + ), + ] + ) + res: DiGraph = wf.run() + out_node = next(iter(res.nodes)) + output = out_node.run() + trs = nib.load(output.outputs.bandpassed_file).header["dim"][4] # type: ignore[reportPrivateImportUsage] + array = load_censor_tsv(output.outputs.regressor_file, trs) + assert not all( + [array.min() == 0, array.max() == 0, array.sum() == 0] + ), "Bandpass filter filtered all signals." 
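The new test pulls its fixture data from OSF at run time, reading an access token from the `OSF_DATA` environment variable (the same variable `download_file` below checks). Assuming such a token is available, the test can be run on its own with, for example:

    OSF_DATA=<osf-personal-access-token> pytest CPAC/nuisance/tests/test_bandpass.py::test_frequency_filter

The token value here is a placeholder; without one the download is expected to fail and, as of the follow-up commit below, the test is skipped.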
diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index 25b16d9e44..1489362e9a 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -19,7 +19,7 @@ from argparse import Namespace import os from pathlib import Path -from typing import cast +from typing import cast, Protocol from _pytest.logging import LogCaptureFixture import pytest @@ -39,6 +39,7 @@ ResourcePool, ) from CPAC.utils.bids_utils import create_cpac_data_config +from CPAC.utils.test_mocks import file_node @pytest.mark.skip(reason="not a pytest test") @@ -272,6 +273,27 @@ def test_missing_resource( assert "can be output from" in caplog.text +class ResourceDownload(Protocol): + """Protocol for a callable that downloads a resource file.""" + + def __call__(self, file: str, destination: Path | str) -> Path: + """Return the path to the downloaded resource file.""" + ... + + +def _download( + self: ResourcePool, + resource: str, + source: ResourceDownload, + file: str, + destination: Path, + index: int = -1, +) -> None: + """Download a file from OSF into a ResourcePool.""" + node = file_node(source(file, destination), index, f"osf_{resource}") + self.set_data(resource, node[0], node[1], {}, -1, source.__module__) + + # bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata" # test_dir = "/test_dir" diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index ea16c0be36..85967c8aeb 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -1,4 +1,24 @@ +# Copyright (C) 2019-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Mock configuration and strategy for testing purposes.""" + import os +from pathlib import Path +from typing import Literal from nipype.interfaces import utility as util @@ -9,16 +29,20 @@ from CPAC.utils.strategy import Strategy -def file_node(path, file_node_num=0): +def file_node( + path: Path | str, file_node_num: int = 0, name: str = "file_node" +) -> tuple[pe.Node, Literal["file"]]: + """Create a file node with the given path and name.""" input_node = pe.Node( util.IdentityInterface(fields=["file"]), - name=f"file_node_{file_node_num}", + name=f"{name}_{file_node_num}", ) - input_node.inputs.file = path + input_node.inputs.file = str(path) return input_node, "file" def configuration_strategy_mock(method="FSL"): + """Mock configuration and strategy for testing.""" fsldir = os.environ.get("FSLDIR") # mock the config dictionary c = Configuration( diff --git a/CPAC/utils/tests/osf.py b/CPAC/utils/tests/osf.py new file mode 100644 index 0000000000..5ddbcc0c25 --- /dev/null +++ b/CPAC/utils/tests/osf.py @@ -0,0 +1,44 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Open Science Framework testing utilities.""" + +import os +from pathlib import Path + +import requests + +FILES = {"residuals.nii.gz": "kyqad", "regressors.1D": "xzuyf"} + + +def download_file(file: str, destination: Path | str) -> Path: + """Download a file from the Open Science Framework.""" + url = f"https://osf.io/download/{FILES[file]}" + response = requests.get( + url, + headers={"Authorization": f"Bearer {os.getenv('OSF_DATA')}"}, + allow_redirects=True, + ) + if not isinstance(destination, Path): + destination = Path(destination) + destination = destination / file if destination.is_dir() else destination + if destination.exists(): + msg = f"File {destination} already exists. Please remove it before downloading." + raise FileExistsError(msg) + response.raise_for_status() + with open(destination, "wb") as f: + f.write(response.content) + return destination From 44f7e0eb517188bac826ac1cc39813552ca09a39 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 12:43:37 -0400 Subject: [PATCH 365/507] :necktie: Skip test with OSF data if no API key --- CPAC/nuisance/tests/test_bandpass.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 808295b449..052ebcc6fc 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -18,6 +18,7 @@ from importlib.abc import Traversable from importlib.resources import files +from os import getenv from pathlib import Path from networkx import DiGraph @@ -65,6 +66,10 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert len(header) == 5 - start_line +@pytest.mark.skipif( + not getenv("OSF_DATA"), + reason="OSF API key not set in OSF_DATA environment variable", +) def test_frequency_filter(tmp_path: Path) -> None: """Test that the bandpass filter works as expected.""" cfg = Preconfiguration("benchmark-FNIRT") From cee0a679ae02d7c9d068ec34c9aaa2320a9e8952 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Jun 2025 17:38:47 +0000 Subject: [PATCH 366/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/utils/tests/test_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index a75bb67c6c..8b4e494597 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -187,6 +187,7 @@ def test_system_deps(): """ check_system_deps(*([True] * 4)) + @pytest.mark.parametrize( "t1", [datetime.now(), datetime.isoformat(datetime.now()), None] ) @@ -200,4 +201,4 @@ def test_datetime_with_safe_none(t1: OptionalDatetime, t2: OptionalDatetime): if t1 and t2: assert isinstance(t2 - t1, timedelta) else: - assert t2 - t1 == timedelta(0) \ No newline at end of file + assert t2 - t1 == timedelta(0) 
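The parametrization above pins down the None-safe subtraction semantics that the run-logging code relies on: when either endpoint of an interval is missing, the difference collapses to `timedelta(0)` instead of raising. A minimal sketch of the behavior under test (the import path is assumed here; the class itself lives in `CPAC/utils/monitoring/monitoring.py`):

from datetime import timedelta

from CPAC.utils.monitoring import DatetimeWithSafeNone  # assumed re-export

start = DatetimeWithSafeNone("2025-06-13T16:00:00")  # accepts ISO strings
finish = DatetimeWithSafeNone(None)  # e.g. a node that never logged a finish
assert finish - start == timedelta(0)  # missing endpoint: no TypeError
assert isinstance(
    DatetimeWithSafeNone("2025-06-13T17:00:00") - start, timedelta
)  # both endpoints present: ordinary datetime arithmetic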
From 71395a1e3d0c476cd57f4c4f658b126d59c7497c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 14:30:52 -0400 Subject: [PATCH 367/507] :white_check_mark: Set wd for test wf --- CPAC/nuisance/tests/test_bandpass.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 26adf8e2be..c11f9b1c81 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -103,7 +103,7 @@ def test_frequency_filter(tmp_path: Path) -> None: """Test that the bandpass filter works as expected.""" cfg = Preconfiguration("benchmark-FNIRT") rpool = TestResourcePool(cfg) - wf = Workflow("bandpass_filtering") + wf = Workflow("bandpass_filtering", base_dir=str(tmp_path)) index = 0 for resource, file in { "realigned_file": "residuals.nii.gz", From 58c84ee329ddb98c57fa6a99f6181305c36f02f3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 15:43:25 -0400 Subject: [PATCH 368/507] =?UTF-8?q?:truck:=20Move=20`test=5Fants=5Fregistr?= =?UTF-8?q?ation=5Fconnector`=20=E2=86=92=20`test=5Fregistration=5Fconnect?= =?UTF-8?q?ors`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ..._registration_connector.py => test_registration_connectors.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename CPAC/registration/tests/{test_ants_registration_connector.py => test_registration_connectors.py} (100%) diff --git a/CPAC/registration/tests/test_ants_registration_connector.py b/CPAC/registration/tests/test_registration_connectors.py similarity index 100% rename from CPAC/registration/tests/test_ants_registration_connector.py rename to CPAC/registration/tests/test_registration_connectors.py From 3507933fae9cbcb56355bb4f8c547468a2e5dac2 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 15:44:52 -0400 Subject: [PATCH 369/507] =?UTF-8?q?:truck:=20Move=20`test=5Ffsl=5Fregistra?= =?UTF-8?q?tion=5Fconnector`=20=E2=86=92=20`test=5Fregistration=5Fconnecto?= =?UTF-8?q?rs`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ..._registration_connector.py => test_registration_connectors.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename CPAC/registration/tests/{test_fsl_registration_connector.py => test_registration_connectors.py} (100%) diff --git a/CPAC/registration/tests/test_fsl_registration_connector.py b/CPAC/registration/tests/test_registration_connectors.py similarity index 100% rename from CPAC/registration/tests/test_fsl_registration_connector.py rename to CPAC/registration/tests/test_registration_connectors.py From b2529b94174490e64041f6b71c5a75fa78dc99aa Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 15:45:59 -0400 Subject: [PATCH 370/507] :technologist: Clarify estimate vs. 
correction node names --- CPAC/func_preproc/func_motion.py | 62 +++++++++++++++++++------------- 1 file changed, 37 insertions(+), 25 deletions(-) diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index f42cf2bf28..ccfb18ccbb 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -476,6 +476,16 @@ def get_motion_ref_fmriprep( get_motion_refs = [get_motion_ref, get_motion_ref_fmriprep] +def _pipe_suffix(estimate: bool, correct: bool, pipe_num: int) -> str: + """Generate a suffix for the pipeline name based on estimate and correct flags.""" + suffix = "" + if estimate: + suffix += "-estimate" + if correct: + suffix += "-correct" + return f"{suffix}_{pipe_num}" + + def motion_correct_3dvolreg( wf: Workflow, cfg: Configuration, @@ -486,6 +496,7 @@ def motion_correct_3dvolreg( ) -> NODEBLOCK_RETURN: """Calculate motion parameters with 3dvolreg.""" outputs: POOL_RESOURCE_DICT = {} + pipe_suffix = _pipe_suffix(estimate, correct, pipe_num) if strat_pool.check_rpool("motion-correct-3dvolreg"): out_motion_A, _ = strat_pool.get_data("motion-correct-3dvolreg") else: @@ -498,7 +509,7 @@ def motion_correct_3dvolreg( function=chunk_ts, imports=chunk_imports, ), - name=f"chunk_{pipe_num}", + name=f"chunk{pipe_suffix}", ) # chunk.inputs.n_chunks = int(cfg.pipeline_setup['system_config'][ @@ -518,7 +529,7 @@ def motion_correct_3dvolreg( function=split_ts_chunks, imports=split_imports, ), - name=f"split_{pipe_num}", + name=f"split{pipe_suffix}", ) node, out = strat_pool.get_data("desc-preproc_bold") @@ -527,21 +538,21 @@ def motion_correct_3dvolreg( out_split_func = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_split_func_{pipe_num}", + name=f"out_split_func{pipe_suffix}", ) wf.connect(split, "split_funcs", out_split_func, "out_file") func_motion_correct = pe.MapNode( interface=preprocess.Volreg(), - name=f"func_generate_ref_{pipe_num}", + name=f"func_generate_ref{pipe_suffix}", iterfield=["in_file"], ) wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") func_concat = pe.Node( - interface=afni_utils.TCat(), name=f"func_concat_{pipe_num}" + interface=afni_utils.TCat(), name=f"func_concat{pipe_suffix}" ) func_concat.inputs.outputtype = "NIFTI_GZ" @@ -549,7 +560,7 @@ def motion_correct_3dvolreg( out_motion = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_{pipe_num}", + name=f"out_motion{pipe_suffix}", ) wf.connect(func_concat, "out_file", out_motion, "out_file") @@ -557,21 +568,21 @@ def motion_correct_3dvolreg( else: out_split_func = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_split_func_{pipe_num}", + name=f"out_split_func{pipe_suffix}", ) node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, out_split_func, "out_file") func_motion_correct = pe.Node( - interface=preprocess.Volreg(), name=f"func_generate_ref_{pipe_num}" + interface=preprocess.Volreg(), name=f"func_generate_ref{pipe_suffix}" ) wf.connect(out_split_func, "out_file", func_motion_correct, "in_file") out_motion = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_{pipe_num}", + name=f"out_motion{pipe_suffix}", ) wf.connect(func_motion_correct, "out_file", out_motion, "out_file") @@ -589,7 +600,7 @@ def motion_correct_3dvolreg( # Calculate motion parameters func_motion_correct_A = func_motion_correct.clone( - f"func_motion_correct_3dvolreg_{pipe_num}" + f"func_motion_correct_3dvolreg{pipe_suffix}" ) 
func_motion_correct_A.inputs.md1d_file = "max_displacement.1D" func_motion_correct_A.inputs.args = args @@ -601,7 +612,7 @@ def motion_correct_3dvolreg( if int(cfg.pipeline_setup["system_config"]["max_cores_per_participant"]) > 1: motion_concat = pe.Node( - interface=afni_utils.TCat(), name=f"motion_concat_{pipe_num}" + interface=afni_utils.TCat(), name=f"motion_concat{pipe_suffix}" ) motion_concat.inputs.outputtype = "NIFTI_GZ" @@ -609,7 +620,7 @@ def motion_correct_3dvolreg( out_motion_A = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_A_{pipe_num}", + name=f"out_motion_A{pipe_suffix}", ) wf.connect(motion_concat, "out_file", out_motion_A, "out_file") @@ -622,7 +633,7 @@ def motion_correct_3dvolreg( function=oned_text_concat, imports=concat_imports, ), - name=f"md1d_concat_{pipe_num}", + name=f"md1d_concat{pipe_suffix}", ) wf.connect(func_motion_correct_A, "md1d_file", md1d_concat, "in_files") @@ -634,7 +645,7 @@ def motion_correct_3dvolreg( function=oned_text_concat, imports=concat_imports, ), - name=f"oned_concat_{pipe_num}", + name=f"oned_concat{pipe_suffix}", ) wf.connect(func_motion_correct_A, "oned_file", oned_concat, "in_files") @@ -646,7 +657,7 @@ def motion_correct_3dvolreg( function=oned_text_concat, imports=concat_imports, ), - name=f"oned_matrix_concat_{pipe_num}", + name=f"oned_matrix_concat{pipe_suffix}", ) wf.connect( @@ -658,21 +669,21 @@ def motion_correct_3dvolreg( out_md1d = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_md1d_{pipe_num}", + name=f"out_md1d{pipe_suffix}", ) wf.connect(md1d_concat, "out_file", out_md1d, "out_file") out_oned = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_{pipe_num}", + name=f"out_oned{pipe_suffix}", ) wf.connect(oned_concat, "out_file", out_oned, "out_file") out_oned_matrix = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_matrix_{pipe_num}", + name=f"out_oned_matrix{pipe_suffix}", ) wf.connect(oned_matrix_concat, "out_file", out_oned_matrix, "out_file") @@ -680,28 +691,28 @@ def motion_correct_3dvolreg( else: out_motion_A = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_motion_A_{pipe_num}", + name=f"out_motion_A{pipe_suffix}", ) wf.connect(func_motion_correct_A, "out_file", out_motion_A, "out_file") out_md1d = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_md1d_{pipe_num}", + name=f"out_md1d{pipe_suffix}", ) wf.connect(func_motion_correct_A, "md1d_file", out_md1d, "out_file") out_oned = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_{pipe_num}", + name=f"out_oned{pipe_suffix}", ) wf.connect(func_motion_correct_A, "oned_file", out_oned, "out_file") out_oned_matrix = pe.Node( interface=util.IdentityInterface(fields=["out_file"]), - name=f"out_oned_matrix_{pipe_num}", + name=f"out_oned_matrix{pipe_suffix}", ) wf.connect( @@ -737,13 +748,14 @@ def motion_correct_mcflirt( correct: bool, ) -> NODEBLOCK_RETURN: """Calculate motion parameters with MCFLIRT.""" + pipe_suffix = _pipe_suffix(estimate, correct, pipe_num) outputs: POOL_RESOURCE_DICT = {} if strat_pool.check_rpool("motion-correct-mcflirt"): func_motion_correct_A, _ = strat_pool.get_data("motion-correct-mcflirt") else: func_motion_correct_A = pe.Node( interface=fsl.MCFLIRT(save_mats=True, save_plots=True), - name=f"func_motion_correct_mcflirt_{pipe_num}", + name=f"func_motion_correct_mcflirt{pipe_suffix}", mem_gb=2.5, ) @@ -763,7 +775,7 @@ def 
motion_correct_mcflirt( output_names=["out_file"], function=normalize_motion_parameters, ), - name=f"norm_motion_params_{pipe_num}", + name=f"norm_motion_params{pipe_suffix}", ) wf.connect( @@ -776,7 +788,7 @@ def motion_correct_mcflirt( output_names=["abs_file", "rels_file"], function=get_mcflirt_rms_abs, ), - name=f"get_mcflirt_rms_abs_{pipe_num}", + name=f"get_mcflirt_rms_abs{pipe_suffix}", ) wf.connect(func_motion_correct_A, "rms_files", get_rms_abs, "rms_files") From 6aa97c9ddb031c436d972dea533959d4af0e24de Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 16:05:41 -0400 Subject: [PATCH 371/507] fixup! Merge branch 'develop' into datetime-none --- CPAC/utils/tests/test_utils.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 8b4e494597..f33cb2b6ab 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -188,6 +188,20 @@ def test_system_deps(): check_system_deps(*([True] * 4)) +def check_expected_keys( + sink_native_transforms: bool, outputs: dict, expected_keys: set +) -> None: + """Check if expected keys are present in outputs based on sink_native_transforms.""" + if sink_native_transforms: + assert expected_keys.issubset( + outputs.keys() + ), f"Expected outputs {expected_keys} not found in {outputs.keys()}" + else: + assert not expected_keys.intersection( + outputs.keys() + ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off" + + @pytest.mark.parametrize( "t1", [datetime.now(), datetime.isoformat(datetime.now()), None] ) From be60f10ffc774b7b6104bba31c0ee5a6259b102d Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 13 Jun 2025 16:26:45 -0400 Subject: [PATCH 372/507] Update CPAC/nuisance/tests/test_bandpass.py Co-authored-by: Jon Cluce --- CPAC/nuisance/tests/test_bandpass.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index dfac85c217..3130d46853 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -50,6 +50,7 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert len(header) == 5 - start_line +@pytest.mark.parametrize("sample_period", [1.0, 1000.0]) @pytest.mark.parametrize( "lowcut, highcut, in_freq, out_freq", [ @@ -58,9 +59,8 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: (0.02, 0.08, 0.04, 0.12), ], ) -def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq): +def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq, sample_period): """Test the ideal bandpass filter with various cutoff frequencies.""" - sample_period = 1.0 t = np.arange(512) * sample_period signal = np.sin(2 * np.pi * in_freq * t) + np.sin(2 * np.pi * out_freq * t) From b42e977f1d6057487452935ee435f34688ee4299 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Jun 2025 20:27:03 +0000 Subject: [PATCH 373/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/nuisance/tests/test_bandpass.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 3130d46853..18de89934c 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ 
-59,7 +59,9 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: (0.02, 0.08, 0.04, 0.12), ], ) -def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq, sample_period): +def test_ideal_bandpass_with_various_cutoffs( + lowcut, highcut, in_freq, out_freq, sample_period +): """Test the ideal bandpass filter with various cutoff frequencies.""" t = np.arange(512) * sample_period signal = np.sin(2 * np.pi * in_freq * t) + np.sin(2 * np.pi * out_freq * t) From e798b43c6d9f04c5d4ffa7a81980fc7a1a5db4bf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 17:11:34 -0400 Subject: [PATCH 374/507] :construction_worker: Pass `$OSF_DATA` to docker --- .circleci/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index 5a07fda77a..9e5f3b2a01 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -119,7 +119,7 @@ commands: coverage-file: .coverage.docker${VARIANT} - run: name: Running pytest in Docker image - command: docker exec --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh + command: docker exec -e OSF_DATA --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh set-python-version: steps: - restore_cache: From d79173d3c1b7b4d64ba7e314a5d5f3e83e22f953 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 13 Jun 2025 21:31:34 +0000 Subject: [PATCH 375/507] fixing the tests --- CPAC/nuisance/tests/test_bandpass.py | 35 ++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index 18de89934c..3b19e837a2 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -50,19 +50,19 @@ def test_read_1D(start_line: int, tmp_path: Path) -> None: assert len(header) == 5 - start_line -@pytest.mark.parametrize("sample_period", [1.0, 1000.0]) @pytest.mark.parametrize( "lowcut, highcut, in_freq, out_freq", [ (0.005, 0.05, 0.01, 0.2), (0.01, 0.1, 0.02, 0.15), (0.02, 0.08, 0.04, 0.12), + (None, 0.1, 0.02, 0.15), + (0.2, None, 0.22, 0.1), ], ) -def test_ideal_bandpass_with_various_cutoffs( - lowcut, highcut, in_freq, out_freq, sample_period -): +def test_ideal_bandpass_with_various_cutoffs(lowcut, highcut, in_freq, out_freq): """Test the ideal bandpass filter with various cutoff frequencies.""" + sample_period = 1.0 t = np.arange(512) * sample_period signal = np.sin(2 * np.pi * in_freq * t) + np.sin(2 * np.pi * out_freq * t) @@ -77,3 +77,30 @@ def test_ideal_bandpass_with_various_cutoffs( assert filt_fft[idx_in] > 0.5 * orig_fft[idx_in] assert filt_fft[idx_out] < 0.1 * orig_fft[idx_out] + + +@pytest.mark.parametrize("sample_period", [1.0, 1000.0]) +def test_ideal_bandpass_cutoffs_clamped_to_nyquist(sample_period): + """Test that ideal_bandpass clamps cutoffs to Nyquist frequency.""" + N = 512 + t = np.arange(N) * sample_period + nyquist = 0.5 / sample_period + + freq_below = nyquist * 0.95 + freq_above = nyquist * 1.05 + + signal = np.sin(2 * np.pi * freq_below * t) + np.sin(2 * np.pi * freq_above * t) + + lowcut = nyquist + 0.01 + highcut = nyquist + 0.1 + + filtered = ideal_bandpass(signal, sample_period, (lowcut, highcut)) + + freqs = np.fft.fftfreq(N, d=sample_period) + filt_fft = np.abs(fft(filtered)) + + idx_below = np.argmin(np.abs(freqs - freq_below)) + idx_above = np.argmin(np.abs(freqs - freq_above)) + + assert filt_fft[idx_below] < 1e-3 + assert filt_fft[idx_above] < 1e-3 
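Worth spelling out before the next patch: the new `(None, 0.1, 0.02, 0.15)` and `(0.2, None, 0.22, 0.1)` cases push `None` cutoffs into `ideal_bandpass`, and in Python 3 an ordering comparison between `None` and a float raises `TypeError`, so a guard written as `HighCutoff > nyquist_freq or HighCutoff is None` fails before the `is None` branch is ever reached. The check has to lead with `is None` and rely on `or` short-circuiting. A minimal sketch of the failure mode and the safe ordering, with `N` and `sample_period` matching the test values:

sample_period = 1.0
N = 512
nyquist_freq = 0.5 / sample_period
HighCutoff = None  # the "no upper cutoff" case

# HighCutoff > nyquist_freq  # TypeError: '>' not supported between
#                            # instances of 'NoneType' and 'float'
if HighCutoff is None or HighCutoff > nyquist_freq:
    high_cutoff_i = int(N / 2)  # clamp to Nyquist; acts as a highpass filter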
From bae5da4a168ae0371dfc586f1cbf089412c30306 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 13 Jun 2025 17:32:28 -0400 Subject: [PATCH 376/507] check none before other --- CPAC/nuisance/bandpass.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py index d56e42c3ef..184d6d4f2b 100644 --- a/CPAC/nuisance/bandpass.py +++ b/CPAC/nuisance/bandpass.py @@ -47,7 +47,7 @@ def ideal_bandpass(data, sample_period, bandpass_freqs): else: low_cutoff_i = np.ceil(LowCutoff * N * sample_period).astype("int") - if HighCutoff > nyquist_freq or HighCutoff is None: + if HighCutoff is None or HighCutoff > nyquist_freq: # Cutoff beyond fs/2 or unspecified (become a highpass filter) high_cutoff_i = int(N / 2) else: From 07b33ecfa5284c55d3e579831c01b88c95d8d029 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 13 Jun 2025 17:39:19 -0400 Subject: [PATCH 377/507] :bulb: Update comments re: PR --- CHANGELOG.md | 1 + CPAC/nuisance/tests/test_bandpass.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ec478a2c0e..dc9a27e491 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`. - `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node. - Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. +- `organism` configuration option. ### Changed diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py index c11f9b1c81..8051846865 100644 --- a/CPAC/nuisance/tests/test_bandpass.py +++ b/CPAC/nuisance/tests/test_bandpass.py @@ -41,7 +41,7 @@ class TestResourcePool(ResourcePool): - """Test ResourcePool to override the OSF download function.""" + """ResourcePool with OSF download function.""" def osf(self, resource: str, file: str, destination: Path, index: int) -> None: """Download a file from the Open Science Framework.""" From 1d200e739006111ca95e90292b40514b180e6c5b Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 17 Jun 2025 10:48:05 -0400 Subject: [PATCH 378/507] :alien: Manually install deprecated tarfile into backports namespace --- .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile | 2 +- .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile | 2 +- Dockerfile | 2 +- variant-lite.Dockerfile | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 1debc54c4a..e41bd6fc73 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -22,7 +22,7 @@ USER root # install C-PAC COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml COPY . 
/code -RUN pip cache purge && pip install -e "/code[graphviz]" +RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]" # set up runscript COPY dev/docker_data /code/docker_data RUN rm -Rf /code/docker_data/checksum && \ diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index 20561f09aa..6f350c4f18 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -23,7 +23,7 @@ USER root COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml COPY . /code COPY --from=ghcr.io/fcp-indi/c-pac_templates:latest /cpac_templates /cpac_templates -RUN pip cache purge && pip install -e "/code[graphviz]" +RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]" # set up runscript COPY dev/docker_data /code/docker_data RUN rm -Rf /code/docker_data/checksum && \ diff --git a/Dockerfile b/Dockerfile index 1debc54c4a..e41bd6fc73 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,7 @@ USER root # install C-PAC COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml COPY . /code -RUN pip cache purge && pip install -e "/code[graphviz]" +RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]" # set up runscript COPY dev/docker_data /code/docker_data RUN rm -Rf /code/docker_data/checksum && \ diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index 20561f09aa..6f350c4f18 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -23,7 +23,7 @@ USER root COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml COPY . /code COPY --from=ghcr.io/fcp-indi/c-pac_templates:latest /cpac_templates /cpac_templates -RUN pip cache purge && pip install -e "/code[graphviz]" +RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]" # set up runscript COPY dev/docker_data /code/docker_data RUN rm -Rf /code/docker_data/checksum && \ From ae97a8691366d45c446209435ea2106fd44ef969 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 17 Jun 2025 12:57:35 -0400 Subject: [PATCH 379/507] :alien: Link `libcrypt.so.2` for `csh` for FreeSurfer [rebuild standard] --- .github/Dockerfiles/base-standard.Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 0ba2cd5158..ee3e474213 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -48,6 +48,7 @@ RUN apt-get autoremove -y \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && find / -type f -print0 | sort -t/ -k2 | xargs -0 rdfind -makehardlinks true \ && rm -rf results.txt \ + && cp -l /lib/x86_64-linux-gnu/libcrypt.so.1.1.0 /lib/x86_64-linux-gnu/libcrypt.so.2 \ && ldconfig \ && chmod 777 / /home/c-pac_user \ && chmod 777 $(ls / | grep -v sys | grep -v proc) From 85967755170457861c378f2f1e400724bd09d971 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 17 Jun 2025 15:21:50 -0400 Subject: [PATCH 380/507] :art: Address style warnings [rebuild standard] Ref https://github.com/FCP-INDI/C-PAC/actions/runs/15713463670/job/44277402688?pr=2243 Co-authored-by: GitHub Actions --- .github/Dockerfiles/base-standard.Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/Dockerfiles/base-standard.Dockerfile 
b/.github/Dockerfiles/base-standard.Dockerfile index ee3e474213..ad3527d303 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2023 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy as FreeSurfer +FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ @@ -37,9 +37,9 @@ ENV PATH="$FREESURFER_HOME/bin:$PATH" \ SUBJECTS_DIR="$FREESURFER_HOME/subjects" \ MNI_DIR="$FREESURFER_HOME/mni" ENV MINC_BIN_DIR="$MNI_DIR/bin" \ - MINC_LIB_DIR="$MNI_DIR/lib" \ - PATH="$PATH:$MINC_BIN_DIR" -COPY --from=FreeSurfer /usr/lib/freesurfer/ /usr/lib/freesurfer/ + MINC_LIB_DIR="$MNI_DIR/lib" +ENV PATH="$PATH:$MINC_BIN_DIR" +COPY --from=freesurfer /usr/lib/freesurfer/ /usr/lib/freesurfer/ COPY dev/docker_data/license.txt $FREESURFER_HOME/license.txt # link libraries & clean up From 2eb7344ae1e81d2cce9711a4e6581f9ef0c1d5cf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 16 Jun 2025 13:38:41 -0400 Subject: [PATCH 381/507] :package: Explicitly set `ANTSPATH` in full (with-FreeSurfer) image [rebuild lite] [rebuild standard] --- .github/Dockerfiles/base-lite.Dockerfile | 36 ++++++++++---------- .github/Dockerfiles/base-standard.Dockerfile | 6 ++++ CPAC/seg_preproc/tests/test_utils.py | 34 ++++++++++++++++++ 3 files changed, 58 insertions(+), 18 deletions(-) create mode 100644 CPAC/seg_preproc/tests/test_utils.py diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index 9aa0f2cdb5..58e8b82530 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -1,4 +1,4 @@ -# Copyright (C) 2023 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,12 +14,12 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy as AFNI -FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy as ANTs -FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy as c3d -FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy as connectome-workbench -FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL -FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy as ICA-AROMA +FROM ghcr.io/fcp-indi/c-pac/afni:23.3.09-jammy AS afni +FROM ghcr.io/fcp-indi/c-pac/ants:2.4.3-jammy AS ants +FROM ghcr.io/fcp-indi/c-pac/c3d:1.0.0-jammy AS c3d +FROM ghcr.io/fcp-indi/c-pac/connectome-workbench:1.5.0.neurodebian-jammy AS connectome-workbench +FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy AS fsl +FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy AS ica-aroma FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ @@ -47,11 +47,11 @@ ENV FSLTCLSH=$FSLDIR/bin/fsltclsh \ PATH=${FSLDIR}/bin:$PATH \ TZ=America/New_York \ USER=c-pac_user -COPY --from=FSL /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu -COPY --from=FSL /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu -COPY --from=FSL /usr/bin /usr/bin -COPY --from=FSL /usr/local/bin /usr/local/bin -COPY --from=FSL /usr/share/fsl /usr/share/fsl +COPY --from=fsl /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu +COPY --from=fsl /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu +COPY --from=fsl /usr/bin /usr/bin +COPY --from=fsl /usr/local/bin /usr/local/bin +COPY --from=fsl /usr/share/fsl /usr/share/fsl # Installing C-PAC dependencies COPY requirements.txt /opt/requirements.txt @@ -67,10 +67,10 @@ ENV C3DPATH /opt/c3d ENV PATH $C3DPATH/bin:$PATH # Installing AFNI -COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ -COPY --from=AFNI /lib64/ld* /lib64/ -COPY --from=AFNI /opt/afni/ /opt/afni/ +COPY --from=afni /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/ +COPY --from=afni /lib64/ld* /lib64/ +COPY --from=afni /opt/afni/ /opt/afni/ # set up AFNI ENV PATH=/opt/afni:$PATH @@ -79,8 +79,8 @@ ENV LANG="en_US.UTF-8" \ LC_ALL="en_US.UTF-8" \ ANTSPATH=/usr/lib/ants/bin \ PATH=/usr/lib/ants/bin:$PATH -COPY --from=ANTs /usr/lib/ants/ /usr/lib/ants/ -COPY --from=ANTs /ants_template/ /ants_template/ +COPY --from=ants /usr/lib/ants/ /usr/lib/ants/ +COPY --from=ants /ants_template/ /ants_template/ # Installing ICA-AROMA COPY --from=ICA-AROMA /opt/ICA-AROMA/ /opt/ICA-AROMA/ diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index ad3527d303..e5e1a6e3e6 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -22,6 +22,12 @@ Standard software dependencies for C-PAC standard images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root +# Installing ANTs +ENV LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" \ + ANTSPATH=/usr/lib/ants/bin \ + PATH=/usr/lib/ants/bin:$PATH + # Installing FreeSurfer RUN apt-get update \ && apt-get install --no-install-recommends -y bc \ diff --git a/CPAC/seg_preproc/tests/test_utils.py b/CPAC/seg_preproc/tests/test_utils.py new file mode 100644 index 0000000000..de5521aa4c --- /dev/null +++ b/CPAC/seg_preproc/tests/test_utils.py @@ -0,0 +1,34 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Tests for segmentation utilities.""" + +import subprocess + + +def test_ants_joint_label_fusion_script() -> None: + """Test antsJointLabelFusion.sh script can run in this environment.""" + try: + subprocess.run( + ["antsJointLabelFusion.sh"], + check=True, + capture_output=True, + ) + except subprocess.CalledProcessError as e: + # There's no explicit 'help' option, but if the script can run, + # the error message does not contain the string "Error". + if "Error" in e.stderr.decode(): + raise e From 76000a1dde5435e5b9939bfcbdc2b9447bcdbac5 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 16 Jun 2025 14:41:54 -0400 Subject: [PATCH 382/507] :memo: Add `$ANTSPATH` to CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e0bab162b..faeb2077e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -68,6 +68,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1). - A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off. - A bug in `ideal_bandpass` where the frequency mask was incorrectly applied, which caused filter to fail in certain cases. +- A bug where `$ANTSPATH` was unset in C-PAC with FreeSurfer images. ### Removed From 29d5e314103f316be3e4ca9726ba58d5d0efa973 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 13:04:13 -0400 Subject: [PATCH 383/507] :pencil2: Correct method call name Fixes #2245 [rebuild lite] [rebuild standard] --- CPAC/registration/registration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 2b1f1a849b..fa07ffa20d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4885,7 +4885,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No }, ) def single_step_resample_timeseries_to_T1template( - wf, cfg, strat_pool, pipe_num, opt=None + wf, cfg, strat_pool: ResourcePool, pipe_num, opt=None ): """Apply motion correction, coreg, anat-to-template transforms... 
@@ -4983,7 +4983,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "source_file") node, out = strat_pool.get_data("coordinate-transformation") - motion_correct_tool = strat_pool.motion_correct_tool("coordinate-transformation") + motion_correct_tool = strat_pool.motion_tool("coordinate-transformation") if motion_correct_tool == "mcflirt": wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": From 3969ac676741bb3b36c1d79b014150fcb579b2ab Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 13:32:36 -0400 Subject: [PATCH 384/507] :bricks: Add `__init__` for seg tests --- CPAC/seg_preproc/tests/__init__.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 CPAC/seg_preproc/tests/__init__.py diff --git a/CPAC/seg_preproc/tests/__init__.py b/CPAC/seg_preproc/tests/__init__.py new file mode 100644 index 0000000000..788d202e81 --- /dev/null +++ b/CPAC/seg_preproc/tests/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Tests for segmentation utilities.""" From ddd9c7a48d8259e707105914ed77249c32a8a595 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:08:17 -0400 Subject: [PATCH 385/507] :goal_net: Handle more datetime edge cases in logging --- CPAC/utils/monitoring/draw_gantt_chart.py | 12 +- CPAC/utils/monitoring/monitoring.py | 144 ++++++++++++++++++++-- 2 files changed, 142 insertions(+), 14 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index a7a0aaac96..a299b51088 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -39,8 +39,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Module to draw an html gantt chart from logfile produced by -``CPAC.utils.monitoring.log_nodes_cb()``. +"""Module to draw an html gantt chart from logfile produced by `~CPAC.utils.monitoring.log_nodes_cb`. See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.draw_gantt_chart.html """ @@ -430,9 +429,12 @@ def generate_gantt_chart( html_string += "
<p>Cores: " + str(cores) + "</p>
" html_string += close_header # Draw nipype nodes Gantt chart and runtimes - html_string += draw_lines( - start_node["start"], duration, minute_scale, space_between_minutes - ) + try: + html_string += draw_lines( + start_node["start"], duration, minute_scale, space_between_minutes + ) + except: + breakpoint() html_string += draw_nodes( start_node["start"], nodes_list, diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 6e9466c33c..9a6ce3c2fa 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -16,14 +16,16 @@ # License along with C-PAC. If not, see . """Monitoring utilities for C-PAC.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import glob import json import math import os import socketserver +import struct import threading -from typing import Any, Optional, TypeAlias +from typing import Any, Optional, overload, TypeAlias +from zoneinfo import available_timezones, ZoneInfo import networkx as nx from traits.trait_base import Undefined @@ -72,16 +74,104 @@ def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedel """Subtract between None and a datetime or timedelta or None.""" return _safe_none_diff(self, other) + def isoformat(self) -> str: + """Return an ISO 8601-like string of 0s for display.""" + return "0000-00-00" + NoTime = _NoTime() """A singleton None that can be used in place of a datetime object.""" class DatetimeWithSafeNone(datetime, _NoTime): - """Time class that can be None or a time value.""" - - def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": + """Time class that can be None or a time value. + + Examples + -------- + >>> from datetime import datetime + >>> DatetimeWithSafeNone(datetime(2025, 6, 18, 21, 6, 43, 730004)).isoformat() + '2025-06-18T21:06:43.730004' + >>> DatetimeWithSafeNone("2025-06-18T21:06:43.730004").isoformat() + '2025-06-18T21:06:43.730004' + >>> DatetimeWithSafeNone(b"\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88\\x6d\\x01").isoformat() + '2025-06-18T16:24:28.028040+00:00' + >>> DatetimeWithSafeNone(b'\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88m\\x00').isoformat() + '2025-06-18T16:24:28.028040' + >>> DatetimeWithSafeNone(DatetimeWithSafeNone("2025-06-18")).isoformat() + '2025-06-18T00:00:00' + >>> DatetimeWithSafeNone(None) + NoTime + >>> DatetimeWithSafeNone(None).isoformat() + '0000-00-00' + """ + + @overload + def __new__( + cls, + year: "OptionalDatetime", + month: None = None, + day: None = None, + hour: None = None, + minute: None = None, + second: None = None, + microsecond: None = None, + tzinfo: None = None, + *, + fold: None = None, + ) -> "DatetimeWithSafeNone | _NoTime": ... + @overload + def __new__( + cls, + year: int, + month: Optional[int] = None, + day: Optional[int] = None, + hour: int = 0, + minute: int = 0, + second: int = 0, + microsecond: int = 0, + tzinfo: Optional[timezone | ZoneInfo] = None, + *, + fold: int = 0, + ) -> "DatetimeWithSafeNone": ... 
+ + def __new__( + cls, + year: "int | OptionalDatetime", + month: Optional[int] = None, + day: Optional[int] = None, + hour: Optional[int] = 0, + minute: Optional[int] = 0, + second: Optional[int] = 0, + microsecond: Optional[int] = 0, + tzinfo: Optional[timezone | ZoneInfo] = None, + *, + fold: Optional[int] = 0, + ) -> "DatetimeWithSafeNone | _NoTime": """Create a new instance of the class.""" + if ( + isinstance(year, int) + and isinstance(month, int) + and isinstance(day, int) + and isinstance(hour, int) + and isinstance(minute, int) + and isinstance(second, int) + and isinstance(microsecond, int) + and isinstance(fold, int) + ): + return datetime.__new__( + cls, + year, + month, + day, + hour, + minute, + second, + microsecond, + tzinfo, + fold=fold, + ) + else: + dt = year if dt is None: return NoTime if isinstance(dt, datetime): @@ -98,9 +188,43 @@ def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": ) if isinstance(dt, bytes): try: - dt = dt.decode("utf-8") + tzflag: Optional[int] + year, month, day, hour, minute, second = struct.unpack(">H5B", dt[:7]) + microsecond, tzflag = struct.unpack(" bool: """Return True if not NoTime.""" return self is not NoTime - def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: # type: ignore[reportIncompatibleMethodOverride] """Subtract between a datetime or timedelta or None.""" return _safe_none_diff(self, other) @@ -146,7 +270,9 @@ def json_dumps(obj: Any, **kwargs) -> str: return json.dumps(obj, cls=DatetimeJSONEncoder, **kwargs) -OptionalDatetime: TypeAlias = Optional[datetime | str | DatetimeWithSafeNone | _NoTime] +OptionalDatetime: TypeAlias = Optional[ + datetime | str | bytes | DatetimeWithSafeNone | _NoTime +] """Type alias for a datetime, ISO-format string or None.""" From c8f45a910bfe79b64487522b468f3cd67822fae6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 14:10:07 -0400 Subject: [PATCH 386/507] :white_check_mark: Refactor `test_match_epi_fmaps` --- CPAC/utils/test_resources.py | 42 ++++++++++++++--------------- CPAC/utils/tests/test_datasource.py | 28 +++++++++++++------ 2 files changed, 41 insertions(+), 29 deletions(-) diff --git a/CPAC/utils/test_resources.py b/CPAC/utils/test_resources.py index da58e4e0f9..5d447292f6 100644 --- a/CPAC/utils/test_resources.py +++ b/CPAC/utils/test_resources.py @@ -1,4 +1,4 @@ -# Copyright (C) 2019-2024 C-PAC Developers +# Copyright (C) 2019-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,29 +14,32 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -from CPAC.utils.monitoring import WFLOGGER +"""Resources for testing utilities.""" +import os +import shutil +from typing import Optional -def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None): - """Set up a basic template Nipype workflow for testing single nodes or - small sub-workflows. 
- """ - import os - import shutil +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.datasource import check_for_s3 +from CPAC.utils.interfaces.datasink import DataSink +from CPAC.utils.monitoring import WFLOGGER - from CPAC.pipeline import nipype_pipeline_engine as pe - from CPAC.utils.datasource import check_for_s3 - from CPAC.utils.interfaces.datasink import DataSink - test_dir = os.path.join(os.getcwd(), test_name) +def setup_test_wf( + s3_prefix, + paths_list, + test_name, + workdirs_to_keep=None, + test_dir: Optional[str] = None, +) -> tuple[pe.Workflow, pe.Node, dict[str, str]]: + """Set up a basic template Nipype workflow for testing small workflows.""" + test_dir = os.path.join(test_dir if test_dir else os.getcwd(), test_name) work_dir = os.path.join(test_dir, "workdir") out_dir = os.path.join(test_dir, "output") if os.path.exists(out_dir): - try: - shutil.rmtree(out_dir) - except: - pass + shutil.rmtree(out_dir, ignore_errors=True) if os.path.exists(work_dir): for dirname in os.listdir(work_dir): @@ -45,10 +48,7 @@ def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None): WFLOGGER.info("%s --- %s\n", dirname, keepdir) if keepdir in dirname: continue - try: - shutil.rmtree(os.path.join(work_dir, dirname)) - except: - pass + shutil.rmtree(os.path.join(work_dir, dirname), ignore_errors=True) local_paths = {} for subpath in paths_list: @@ -67,4 +67,4 @@ def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None): ds.inputs.base_directory = out_dir ds.inputs.parameterization = True - return (wf, ds, local_paths) + return wf, ds, local_paths diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index be7c2255c2..dea1a3877f 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2019-2024 C-PAC Developers +# Copyright (C) 2019-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,9 +14,10 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-import json +"""Test datasource utilities.""" -import pytest +import json +from pathlib import Path from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import match_epi_fmaps @@ -24,8 +25,8 @@ from CPAC.utils.test_resources import setup_test_wf -@pytest.mark.skip(reason="needs refactoring") -def test_match_epi_fmaps(): +def test_match_epi_fmaps(tmp_path: Path) -> None: + """Test `~CPAC.utils.datasource.match_epi_fmaps`.""" # good data to use s3_prefix = "s3://fcp-indi/data/Projects/HBN/MRI/Site-CBIC/sub-NDARAB708LM5" s3_paths = [ @@ -36,7 +37,9 @@ def test_match_epi_fmaps(): "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json", ] - wf, ds, local_paths = setup_test_wf(s3_prefix, s3_paths, "test_match_epi_fmaps") + wf, ds, local_paths = setup_test_wf( + s3_prefix, s3_paths, "test_match_epi_fmaps", test_dir=str(tmp_path) + ) opposite_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json"] same_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json"] @@ -65,15 +68,24 @@ def test_match_epi_fmaps(): match_fmaps = pe.Node( Function( - input_names=["fmap_dct", "bold_pedir"], + input_names=[ + "bold_pedir", + "epi_fmap_one", + "epi_fmap_params_one", + "epi_fmap_two", + "epi_fmap_params_two", + ], output_names=["opposite_pe_epi", "same_pe_epi"], function=match_epi_fmaps, as_module=True, ), name="match_epi_fmaps", ) - match_fmaps.inputs.fmap_dct = fmap_paths_dct match_fmaps.inputs.bold_pedir = bold_pedir + match_fmaps.inputs.epi_fmap_one = fmap_paths_dct["epi_PA"]["scan"] + match_fmaps.inputs.epi_fmap_params_one = fmap_paths_dct["epi_PA"]["scan_parameters"] + match_fmaps.inputs.epi_fmap_two = fmap_paths_dct["epi_AP"]["scan"] + match_fmaps.inputs.epi_fmap_params_two = fmap_paths_dct["epi_AP"]["scan_parameters"] ds.inputs.func_json = func_json ds.inputs.opposite_pe_json = opposite_pe_json From 8a7f1036433630133f06152f01992348e45e7286 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:21:26 -0400 Subject: [PATCH 387/507] :goal_net: Handle bytestring in phase encoding metadata --- CPAC/utils/datasource.py | 28 ++- CPAC/utils/tests/test_datasource.py | 366 ++++++++++++++++++++++++---- 2 files changed, 339 insertions(+), 55 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 25adb1eeca..aa102b0e3d 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -20,6 +20,7 @@ import json from pathlib import Path import re +from typing import Any, Optional from voluptuous import RequiredFieldInvalid from nipype.interfaces import utility as util @@ -463,12 +464,12 @@ def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None): def match_epi_fmaps( - bold_pedir, - epi_fmap_one, - epi_fmap_params_one, - epi_fmap_two=None, - epi_fmap_params_two=None, -): + bold_pedir: str, + epi_fmap_one: str, + epi_fmap_params_one: dict[str, Any], + epi_fmap_two: Optional[str] = None, + epi_fmap_params_two: Optional[dict[str, Any]] = None, +) -> tuple[str, str]: """Match EPI field maps to the BOLD scan. 
Parse the field map files in the data configuration and determine which @@ -504,13 +505,22 @@ def match_epi_fmaps( with open(scan_params, "r") as f: scan_params = json.load(f) if "PhaseEncodingDirection" in scan_params: - epi_pedir = scan_params["PhaseEncodingDirection"] + epi_pedir: str | bytes = scan_params["PhaseEncodingDirection"] + if isinstance(epi_pedir, bytes): + epi_pedir = epi_pedir.decode("utf-8") if epi_pedir == bold_pedir: same_pe_epi = epi_scan elif epi_pedir[0] == bold_pedir[0]: opposite_pe_epi = epi_scan - return (opposite_pe_epi, same_pe_epi) + if same_pe_epi is None: + msg = f"Same phase encoding EPI: {bold_pedir}" + raise FileNotFoundError(msg) + if opposite_pe_epi is None: + msg = f"Opposite phase encoding EPI: {bold_pedir}" + raise FileNotFoundError(msg) + + return opposite_pe_epi, same_pe_epi def ingress_func_metadata( diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index dea1a3877f..0d1a5c6755 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -16,55 +16,331 @@ # License along with C-PAC. If not, see . """Test datasource utilities.""" +from dataclasses import dataclass import json from pathlib import Path +from typing import Any + +from networkx.classes.digraph import DiGraph +import pytest from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import match_epi_fmaps from CPAC.utils.interfaces import Function from CPAC.utils.test_resources import setup_test_wf +from CPAC.utils.utils import PE_DIRECTION -def test_match_epi_fmaps(tmp_path: Path) -> None: - """Test `~CPAC.utils.datasource.match_epi_fmaps`.""" - # good data to use - s3_prefix = "s3://fcp-indi/data/Projects/HBN/MRI/Site-CBIC/sub-NDARAB708LM5" - s3_paths = [ - "func/sub-NDARAB708LM5_task-rest_run-1_bold.json", - "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz", - "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json", - "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz", - "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json", - ] +@dataclass +class MatchEpiFmapsInputs: + """Store test data for `match_epi_fmaps`.""" - wf, ds, local_paths = setup_test_wf( - s3_prefix, s3_paths, "test_match_epi_fmaps", test_dir=str(tmp_path) - ) + bold_pedir: PE_DIRECTION + epi_fmaps: list[tuple[str, dict[str, Any]]] + + +def match_epi_fmaps_inputs( + generate: bool, path: Path +) -> tuple[pe.Workflow, MatchEpiFmapsInputs]: + """Return inputs for `~CPAC.utils.datasource.match_epi_fmaps`.""" + if generate: + # good data to use + s3_prefix = "s3://fcp-indi/data/Projects/HBN/MRI/Site-CBIC/sub-NDARAB708LM5" + s3_paths = [ + "func/sub-NDARAB708LM5_task-rest_run-1_bold.json", + "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz", + "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json", + "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz", + "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json", + ] + + wf, ds, local_paths = setup_test_wf( + s3_prefix, s3_paths, "test_match_epi_fmaps", test_dir=str(path) + ) - opposite_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json"] - same_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json"] - func_json = local_paths["func/sub-NDARAB708LM5_task-rest_run-1_bold.json"] + opposite_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json"] + same_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json"] + func_json = local_paths["func/sub-NDARAB708LM5_task-rest_run-1_bold.json"] - with open(opposite_pe_json, "r") as f: - opposite_pe_params = 
json.load(f) + with open(opposite_pe_json, "r") as f: + opposite_pe_params = json.load(f) - with open(same_pe_json, "r") as f: - same_pe_params = json.load(f) + with open(same_pe_json, "r") as f: + same_pe_params = json.load(f) - with open(func_json, "r") as f: - func_params = json.load(f) - bold_pedir = func_params["PhaseEncodingDirection"] + with open(func_json, "r") as f: + func_params = json.load(f) + bold_pedir = func_params["PhaseEncodingDirection"] + + fmap_paths_dct = { + "epi_PA": { + "scan": local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz"], + "scan_parameters": opposite_pe_params, + }, + "epi_AP": { + "scan": local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz"], + "scan_parameters": same_pe_params, + }, + } + ds.inputs.func_json = func_json + ds.inputs.opposite_pe_json = opposite_pe_json + ds.inputs.same_pe_json = same_pe_json + return wf, MatchEpiFmapsInputs( + bold_pedir, + [ + (scan["scan"], scan["scan_parameters"]) + for scan in fmap_paths_dct.values() + ], + ) + _paths = [ + f"{path}/sub-NDARAB514MAJ_dir-AP_acq-fMRI_epi.nii.gz", + f"{path}/sub-NDARAB514MAJ_dir-PA_acq-fMRI_epi.nii.gz", + ] + for _ in _paths: + Path(_).touch(exist_ok=True) + return pe.Workflow("test_match_epi_fmaps", path), MatchEpiFmapsInputs( + "j-", + [ + ( + _paths[0], + { + "AcquisitionMatrixPE": 84, + "BandwidthPerPixelPhaseEncode": 23.81, + "BaseResolution": 84, + "BodyPartExamined": b"BRAIN", + "ConsistencyInfo": b"N4_VE11B_LATEST_20150530", + "ConversionSoftware": b"dcm2niix", + "ConversionSoftwareVersion": b"v1.0.20171215 GCC4.8.4", + "DerivedVendorReportedEchoSpacing": 0.00049999, + "DeviceSerialNumber": b"67080", + "DwellTime": 2.6e-06, + "EchoTime": 0.0512, + "EchoTrainLength": 84, + "EffectiveEchoSpacing": 0.00049999, + "FlipAngle": 90, + "ImageOrientationPatientDICOM": [1, 0, 0, 0, 1, 0], + "ImageType": ["ORIGINAL", "PRIMARY", "M", "ND", "MOSAIC"], + "InPlanePhaseEncodingDirectionDICOM": b"COL", + "MRAcquisitionType": b"2D", + "MagneticFieldStrength": 3, + "Manufacturer": b"Siemens", + "ManufacturersModelName": b"Prisma_fit", + "Modality": b"MR", + "PartialFourier": 1, + "PatientPosition": b"HFS", + "PercentPhaseFOV": 100, + "PhaseEncodingDirection": b"j-", + "PhaseEncodingSteps": 84, + "PhaseResolution": 1, + "PixelBandwidth": 2290, + "ProcedureStepDescription": b"CMI_HBN-CBIC", + "ProtocolName": b"cmrr_fMRI_DistortionMap_AP", + "PulseSequenceDetails": b"%CustomerSeq%_cmrr_mbep2d_se", + "ReceiveCoilActiveElements": b"HEA;HEP", + "ReceiveCoilName": b"Head_32", + "ReconMatrixPE": 84, + "RepetitionTime": 5.301, + "SAR": 0.364379, + "ScanOptions": b"FS", + "ScanningSequence": b"EP", + "SequenceName": b"epse2d1_84", + "SequenceVariant": b"SK", + "SeriesDescription": b"cmrr_fMRI_DistortionMap_AP", + "ShimSetting": [208, -10464, -5533, 615, -83, -88, 55, 30], + "SliceThickness": 2.4, + "SliceTiming": [ + 2.64, + 0, + 2.7275, + 0.0875, + 2.815, + 0.175, + 2.9025, + 0.2625, + 2.9925, + 0.3525, + 3.08, + 0.44, + 3.1675, + 0.5275, + 3.255, + 0.615, + 3.3425, + 0.7025, + 3.4325, + 0.7925, + 3.52, + 0.88, + 3.6075, + 0.9675, + 3.695, + 1.055, + 3.785, + 1.1425, + 3.8725, + 1.2325, + 3.96, + 1.32, + 4.0475, + 1.4075, + 4.135, + 1.495, + 4.225, + 1.5825, + 4.3125, + 1.6725, + 4.4, + 1.76, + 4.4875, + 1.8475, + 4.575, + 1.935, + 4.665, + 2.0225, + 4.7525, + 2.1125, + 4.84, + 2.2, + 4.9275, + 2.2875, + 5.015, + 2.375, + 5.105, + 2.4625, + 5.1925, + 2.5525, + ], + "SoftwareVersions": b"syngo_MR_E11", + "SpacingBetweenSlices": 2.4, + "StationName": b"MRTRIO3TX72", + "TotalReadoutTime": 
0.0414992, + "TxRefAmp": 209.923, + }, + ), + ( + _paths[1], + { + "AcquisitionMatrixPE": 84, + "BandwidthPerPixelPhaseEncode": 23.81, + "BaseResolution": 84, + "BodyPartExamined": b"BRAIN", + "ConsistencyInfo": b"N4_VE11B_LATEST_20150530", + "ConversionSoftware": b"dcm2niix", + "ConversionSoftwareVersion": b"v1.0.20171215 GCC4.8.4", + "DerivedVendorReportedEchoSpacing": 0.00049999, + "DeviceSerialNumber": b"67080", + "DwellTime": 2.6e-06, + "EchoTime": 0.0512, + "EchoTrainLength": 84, + "EffectiveEchoSpacing": 0.00049999, + "FlipAngle": 90, + "ImageOrientationPatientDICOM": [1, 0, 0, 0, 1, 0], + "ImageType": ["ORIGINAL", "PRIMARY", "M", "ND", "MOSAIC"], + "InPlanePhaseEncodingDirectionDICOM": b"COL", + "MRAcquisitionType": b"2D", + "MagneticFieldStrength": 3, + "Manufacturer": b"Siemens", + "ManufacturersModelName": b"Prisma_fit", + "Modality": b"MR", + "PartialFourier": 1, + "PatientPosition": b"HFS", + "PercentPhaseFOV": 100, + "PhaseEncodingDirection": b"j", + "PhaseEncodingSteps": 84, + "PhaseResolution": 1, + "PixelBandwidth": 2290, + "ProcedureStepDescription": b"CMI_HBN-CBIC", + "ProtocolName": b"cmrr_fMRI_DistortionMap_PA", + "PulseSequenceDetails": b"%CustomerSeq%_cmrr_mbep2d_se", + "ReceiveCoilActiveElements": b"HEA;HEP", + "ReceiveCoilName": b"Head_32", + "ReconMatrixPE": 84, + "RepetitionTime": 5.301, + "SAR": 0.364379, + "ScanOptions": b"FS", + "ScanningSequence": b"EP", + "SequenceName": b"epse2d1_84", + "SequenceVariant": b"SK", + "SeriesDescription": b"cmrr_fMRI_DistortionMap_PA", + "ShimSetting": [208, -10464, -5533, 615, -83, -88, 55, 30], + "SliceThickness": 2.4, + "SliceTiming": [ + 2.64, + 0, + 2.73, + 0.09, + 2.8175, + 0.1775, + 2.905, + 0.265, + 2.9925, + 0.3525, + 3.08, + 0.44, + 3.17, + 0.53, + 3.2575, + 0.6175, + 3.345, + 0.705, + 3.4325, + 0.7925, + 3.52, + 0.88, + 3.61, + 0.97, + 3.6975, + 1.0575, + 3.785, + 1.145, + 3.8725, + 1.2325, + 3.9625, + 1.32, + 4.05, + 1.41, + 4.1375, + 1.4975, + 4.225, + 1.585, + 4.3125, + 1.6725, + 4.4025, + 1.76, + 4.49, + 1.85, + 4.5775, + 1.9375, + 4.665, + 2.025, + 4.7525, + 2.1125, + 4.8425, + 2.2, + 4.93, + 2.29, + 5.0175, + 2.3775, + 5.105, + 2.465, + 5.1925, + 2.5525, + ], + "SoftwareVersions": b"syngo_MR_E11", + "SpacingBetweenSlices": 2.4, + "StationName": b"MRTRIO3TX72", + "TotalReadoutTime": 0.0414992, + "TxRefAmp": 209.923, + }, + ), + ], + ) - fmap_paths_dct = { - "epi_PA": { - "scan": local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz"], - "scan_parameters": opposite_pe_params, - }, - "epi_AP": { - "scan": local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz"], - "scan_parameters": same_pe_params, - }, - } + +@pytest.mark.parametrize("generate", [True, False]) +def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: + """Test `~CPAC.utils.datasource.match_epi_fmaps`.""" + wf, data = match_epi_fmaps_inputs(generate, tmp_path) match_fmaps = pe.Node( Function( @@ -81,17 +357,15 @@ def test_match_epi_fmaps(tmp_path: Path) -> None: ), name="match_epi_fmaps", ) - match_fmaps.inputs.bold_pedir = bold_pedir - match_fmaps.inputs.epi_fmap_one = fmap_paths_dct["epi_PA"]["scan"] - match_fmaps.inputs.epi_fmap_params_one = fmap_paths_dct["epi_PA"]["scan_parameters"] - match_fmaps.inputs.epi_fmap_two = fmap_paths_dct["epi_AP"]["scan"] - match_fmaps.inputs.epi_fmap_params_two = fmap_paths_dct["epi_AP"]["scan_parameters"] - - ds.inputs.func_json = func_json - ds.inputs.opposite_pe_json = opposite_pe_json - ds.inputs.same_pe_json = same_pe_json + match_fmaps.inputs.bold_pedir = data.bold_pedir + 
match_fmaps.inputs.epi_fmap_one = data.epi_fmaps[0][0] + match_fmaps.inputs.epi_fmap_params_one = data.epi_fmaps[0][1] + match_fmaps.inputs.epi_fmap_two = data.epi_fmaps[1][0] + match_fmaps.inputs.epi_fmap_params_two = data.epi_fmaps[1][1] - wf.connect(match_fmaps, "opposite_pe_epi", ds, "should_be_dir-PA") - wf.connect(match_fmaps, "same_pe_epi", ds, "should_be_dir-AP") + wf.add_nodes([match_fmaps]) - wf.run() + graph: DiGraph = wf.run() + result = list(graph.nodes)[-1].run() + assert Path(result.outputs.opposite_pe_epi).exists() + assert Path(result.outputs.same_pe_epi).exists() From 1ca56772ef0c65bad5b1a23836a55b77e73d651d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:39:56 -0400 Subject: [PATCH 388/507] :pencil2: Remove duplicate library link [rebuild standard] --- .github/Dockerfiles/base-standard.Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index ee1d612b8f..07f7884d7d 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -49,7 +49,6 @@ RUN apt-get autoremove -y \ && ln -s /usr/lib/x86_64-linux-gnu/libcrypt.so.1 /usr/lib/x86_64-linux-gnu/libcrypt.so.2 \ && find / -type f -print0 | sort -t/ -k2 | xargs -0 rdfind -makehardlinks true \ && rm -rf results.txt \ - && cp -l /lib/x86_64-linux-gnu/libcrypt.so.1.1.0 /lib/x86_64-linux-gnu/libcrypt.so.2 \ && ldconfig \ && chmod 777 / /home/c-pac_user \ && chmod 777 $(ls / | grep -v sys | grep -v proc) From c31ac21ea04842bbc246408be0b264fc4eb28ce3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:08:17 -0400 Subject: [PATCH 389/507] :goal_net: Handle more datetime edge cases in logging [rebuild standard] --- CPAC/utils/monitoring/draw_gantt_chart.py | 12 +- CPAC/utils/monitoring/monitoring.py | 144 ++++++++++++++++++++-- 2 files changed, 142 insertions(+), 14 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index a7a0aaac96..a299b51088 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -39,8 +39,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Module to draw an html gantt chart from logfile produced by -``CPAC.utils.monitoring.log_nodes_cb()``. +"""Module to draw an html gantt chart from logfile produced by `~CPAC.utils.monitoring.log_nodes_cb`. See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.draw_gantt_chart.html """ @@ -430,9 +429,12 @@ def generate_gantt_chart( html_string += "
<p>Cores: " + str(cores) + "</p>
" html_string += close_header # Draw nipype nodes Gantt chart and runtimes - html_string += draw_lines( - start_node["start"], duration, minute_scale, space_between_minutes - ) + try: + html_string += draw_lines( + start_node["start"], duration, minute_scale, space_between_minutes + ) + except: + breakpoint() html_string += draw_nodes( start_node["start"], nodes_list, diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 6e9466c33c..9a6ce3c2fa 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -16,14 +16,16 @@ # License along with C-PAC. If not, see . """Monitoring utilities for C-PAC.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import glob import json import math import os import socketserver +import struct import threading -from typing import Any, Optional, TypeAlias +from typing import Any, Optional, overload, TypeAlias +from zoneinfo import available_timezones, ZoneInfo import networkx as nx from traits.trait_base import Undefined @@ -72,16 +74,104 @@ def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedel """Subtract between None and a datetime or timedelta or None.""" return _safe_none_diff(self, other) + def isoformat(self) -> str: + """Return an ISO 8601-like string of 0s for display.""" + return "0000-00-00" + NoTime = _NoTime() """A singleton None that can be used in place of a datetime object.""" class DatetimeWithSafeNone(datetime, _NoTime): - """Time class that can be None or a time value.""" - - def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": + """Time class that can be None or a time value. + + Examples + -------- + >>> from datetime import datetime + >>> DatetimeWithSafeNone(datetime(2025, 6, 18, 21, 6, 43, 730004)).isoformat() + '2025-06-18T21:06:43.730004' + >>> DatetimeWithSafeNone("2025-06-18T21:06:43.730004").isoformat() + '2025-06-18T21:06:43.730004' + >>> DatetimeWithSafeNone(b"\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88\\x6d\\x01").isoformat() + '2025-06-18T16:24:28.028040+00:00' + >>> DatetimeWithSafeNone(b'\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88m\\x00').isoformat() + '2025-06-18T16:24:28.028040' + >>> DatetimeWithSafeNone(DatetimeWithSafeNone("2025-06-18")).isoformat() + '2025-06-18T00:00:00' + >>> DatetimeWithSafeNone(None) + NoTime + >>> DatetimeWithSafeNone(None).isoformat() + '0000-00-00' + """ + + @overload + def __new__( + cls, + year: "OptionalDatetime", + month: None = None, + day: None = None, + hour: None = None, + minute: None = None, + second: None = None, + microsecond: None = None, + tzinfo: None = None, + *, + fold: None = None, + ) -> "DatetimeWithSafeNone | _NoTime": ... + @overload + def __new__( + cls, + year: int, + month: Optional[int] = None, + day: Optional[int] = None, + hour: int = 0, + minute: int = 0, + second: int = 0, + microsecond: int = 0, + tzinfo: Optional[timezone | ZoneInfo] = None, + *, + fold: int = 0, + ) -> "DatetimeWithSafeNone": ... 
+ + def __new__( + cls, + year: "int | OptionalDatetime", + month: Optional[int] = None, + day: Optional[int] = None, + hour: Optional[int] = 0, + minute: Optional[int] = 0, + second: Optional[int] = 0, + microsecond: Optional[int] = 0, + tzinfo: Optional[timezone | ZoneInfo] = None, + *, + fold: Optional[int] = 0, + ) -> "DatetimeWithSafeNone | _NoTime": """Create a new instance of the class.""" + if ( + isinstance(year, int) + and isinstance(month, int) + and isinstance(day, int) + and isinstance(hour, int) + and isinstance(minute, int) + and isinstance(second, int) + and isinstance(microsecond, int) + and isinstance(fold, int) + ): + return datetime.__new__( + cls, + year, + month, + day, + hour, + minute, + second, + microsecond, + tzinfo, + fold=fold, + ) + else: + dt = year if dt is None: return NoTime if isinstance(dt, datetime): @@ -98,9 +188,43 @@ def __new__(cls, dt: "OptionalDatetime") -> "DatetimeWithSafeNone | _NoTime": ) if isinstance(dt, bytes): try: - dt = dt.decode("utf-8") + tzflag: Optional[int] + year, month, day, hour, minute, second = struct.unpack(">H5B", dt[:7]) + microsecond, tzflag = struct.unpack(" bool: """Return True if not NoTime.""" return self is not NoTime - def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: + def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: # type: ignore[reportIncompatibleMethodOverride] """Subtract between a datetime or timedelta or None.""" return _safe_none_diff(self, other) @@ -146,7 +270,9 @@ def json_dumps(obj: Any, **kwargs) -> str: return json.dumps(obj, cls=DatetimeJSONEncoder, **kwargs) -OptionalDatetime: TypeAlias = Optional[datetime | str | DatetimeWithSafeNone | _NoTime] +OptionalDatetime: TypeAlias = Optional[ + datetime | str | bytes | DatetimeWithSafeNone | _NoTime +] """Type alias for a datetime, ISO-format string or None.""" From 73d64d3c82ccf5c56537b0bf50d962fec4b9d769 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:45:17 -0400 Subject: [PATCH 390/507] :rewind: Remove debugging code --- CPAC/utils/monitoring/draw_gantt_chart.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index a299b51088..a7a0aaac96 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -39,7 +39,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Module to draw an html gantt chart from logfile produced by `~CPAC.utils.monitoring.log_nodes_cb`. +"""Module to draw an html gantt chart from logfile produced by +``CPAC.utils.monitoring.log_nodes_cb()``. See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.draw_gantt_chart.html """ @@ -429,12 +430,9 @@ def generate_gantt_chart( html_string += "
<p>Cores: " + str(cores) + "</p>
" html_string += close_header # Draw nipype nodes Gantt chart and runtimes - try: - html_string += draw_lines( - start_node["start"], duration, minute_scale, space_between_minutes - ) - except: - breakpoint() + html_string += draw_lines( + start_node["start"], duration, minute_scale, space_between_minutes + ) html_string += draw_nodes( start_node["start"], nodes_list, From 50fb43b56eb116bf07c9280b5aa98dde5cbd3e13 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 22:46:54 -0400 Subject: [PATCH 391/507] :rewind: Remove debugging code --- CPAC/utils/monitoring/draw_gantt_chart.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index a299b51088..d1fe4517c5 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -429,12 +429,9 @@ def generate_gantt_chart( html_string += "
<p>Cores: " + str(cores) + "</p>
" html_string += close_header # Draw nipype nodes Gantt chart and runtimes - try: - html_string += draw_lines( - start_node["start"], duration, minute_scale, space_between_minutes - ) - except: - breakpoint() + html_string += draw_lines( + start_node["start"], duration, minute_scale, space_between_minutes + ) html_string += draw_nodes( start_node["start"], nodes_list, From 1ed8dff3f5b81db4b9617fb0c51fc6b5834959ad Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 23:00:02 -0400 Subject: [PATCH 392/507] :art: DRY `match_epi_fmaps_function_node` --- .../distortion_correction.py | 20 ++----------------- CPAC/utils/datasource.py | 19 ++++++++++++++++++ CPAC/utils/tests/test_datasource.py | 19 ++---------------- 3 files changed, 23 insertions(+), 35 deletions(-) diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index 4457ab91fe..0ddf005e92 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -36,7 +36,7 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.utils import function -from CPAC.utils.datasource import match_epi_fmaps +from CPAC.utils.datasource import match_epi_fmaps_function_node from CPAC.utils.interfaces.function import Function @@ -406,23 +406,7 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None): 3dQWarp. The output of this can then proceed to func_preproc. """ - match_epi_imports = ["import json"] - match_epi_fmaps_node = pe.Node( - Function( - input_names=[ - "bold_pedir", - "epi_fmap_one", - "epi_fmap_params_one", - "epi_fmap_two", - "epi_fmap_params_two", - ], - output_names=["opposite_pe_epi", "same_pe_epi"], - function=match_epi_fmaps, - imports=match_epi_imports, - as_module=True, - ), - name=f"match_epi_fmaps_{pipe_num}", - ) + match_epi_fmaps_node = match_epi_fmaps_function_node(f"match_epi_fmaps_{pipe_num}") node, out = strat_pool.get_data("epi-1") wf.connect(node, out, match_epi_fmaps_node, "epi_fmap_one") diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index aa102b0e3d..a23f373487 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -523,6 +523,25 @@ def match_epi_fmaps( return opposite_pe_epi, same_pe_epi +def match_epi_fmaps_function_node(name: str = "match_epi_fmaps"): + """Return a Function node for `~CPAC.utils.datasource.match_epi_fmaps`.""" + return pe.Node( + Function( + input_names=[ + "bold_pedir", + "epi_fmap_one", + "epi_fmap_params_one", + "epi_fmap_two", + "epi_fmap_params_two", + ], + output_names=["opposite_pe_epi", "same_pe_epi"], + function=match_epi_fmaps, + as_module=True, + ), + name=name, + ) + + def ingress_func_metadata( wf, cfg, diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 0d1a5c6755..c95113a9b1 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -25,8 +25,7 @@ import pytest from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.utils.datasource import match_epi_fmaps -from CPAC.utils.interfaces import Function +from CPAC.utils.datasource import match_epi_fmaps_function_node from CPAC.utils.test_resources import setup_test_wf from CPAC.utils.utils import PE_DIRECTION @@ -342,21 +341,7 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: """Test `~CPAC.utils.datasource.match_epi_fmaps`.""" wf, data = match_epi_fmaps_inputs(generate, tmp_path) - match_fmaps = 
pe.Node( - Function( - input_names=[ - "bold_pedir", - "epi_fmap_one", - "epi_fmap_params_one", - "epi_fmap_two", - "epi_fmap_params_two", - ], - output_names=["opposite_pe_epi", "same_pe_epi"], - function=match_epi_fmaps, - as_module=True, - ), - name="match_epi_fmaps", - ) + match_fmaps = match_epi_fmaps_function_node() match_fmaps.inputs.bold_pedir = data.bold_pedir match_fmaps.inputs.epi_fmap_one = data.epi_fmaps[0][0] match_fmaps.inputs.epi_fmap_params_one = data.epi_fmaps[0][1] From 4c0f24b35e41ddf0847fb4b3c3f853ad531c0ef4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 23:05:30 -0400 Subject: [PATCH 393/507] :pencil2: Quote typechecking type --- CPAC/registration/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index fa07ffa20d..8d97951283 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4885,7 +4885,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No }, ) def single_step_resample_timeseries_to_T1template( - wf, cfg, strat_pool: ResourcePool, pipe_num, opt=None + wf, cfg, strat_pool: "ResourcePool", pipe_num, opt=None ): """Apply motion correction, coreg, anat-to-template transforms... From 2afdf2106f9540c963a0300c8defaa67830d2a85 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 18 Jun 2025 23:21:06 -0400 Subject: [PATCH 394/507] :pencil2: Quote typechecking type --- CPAC/registration/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index fa07ffa20d..8d97951283 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4885,7 +4885,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No }, ) def single_step_resample_timeseries_to_T1template( - wf, cfg, strat_pool: ResourcePool, pipe_num, opt=None + wf, cfg, strat_pool: "ResourcePool", pipe_num, opt=None ): """Apply motion correction, coreg, anat-to-template transforms... 
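The two identical one-line patches above rely on a standard forward-reference pattern: a quoted annotation is never evaluated at import time, so the annotated name only needs to exist for static type checkers. A minimal sketch of the idea, assuming `ResourcePool` is imported only under `TYPE_CHECKING` in the patched module (the function below is a hypothetical stand-in):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from CPAC.pipeline.engine import ResourcePool  # skipped at runtime

    def uses_pool(strat_pool: "ResourcePool") -> None:  # hypothetical example
        # The string annotation is deferred, so no NameError is raised at
        # definition time even though ResourcePool is undefined at runtime.
        ...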
From ed7644b3c2917d160d5a51c376122dc1eda4bfef Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 20 Jun 2025 14:51:05 -0400 Subject: [PATCH 395/507] :package: Install `bc` in `lite` image for `fsl_prepare_fieldmap` [rebuild lite] [rebuild standard] --- .github/Dockerfiles/base-lite.Dockerfile | 2 ++ .github/Dockerfiles/base-standard.Dockerfile | 1 - CHANGELOG.md | 1 + CPAC/surface/tests/test_installation.py | 23 ++++++++++++++------ 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile index 3ba187a3bb..5c156e2c77 100644 --- a/.github/Dockerfiles/base-lite.Dockerfile +++ b/.github/Dockerfiles/base-lite.Dockerfile @@ -52,6 +52,8 @@ COPY --from=fsl /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu COPY --from=fsl /usr/bin /usr/bin COPY --from=fsl /usr/local/bin /usr/local/bin COPY --from=fsl /usr/share/fsl /usr/share/fsl +RUN apt-get update \ + && apt-get install --no-install-recommends -y bc # Installing C-PAC dependencies COPY requirements.txt /opt/requirements.txt diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 07f7884d7d..2a696d9f20 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -24,7 +24,6 @@ USER root # Installing FreeSurfer RUN apt-get update \ - && apt-get install --no-install-recommends -y bc \ && yes | mamba install tcsh \ && yes | mamba clean --all \ && cp -l `which tcsh` /bin/tcsh \ diff --git a/CHANGELOG.md b/CHANGELOG.md index a3a965b2c3..765620c308 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. - `deoblique` field in pipeline config with `warp` and `refit` options to apply `3dWarp` or `3drefit` during data initialization. - `organism` configuration option. +- `bc` to `lite` container images. ### Changed diff --git a/CPAC/surface/tests/test_installation.py b/CPAC/surface/tests/test_installation.py index 0af0a9621a..3f53330435 100644 --- a/CPAC/surface/tests/test_installation.py +++ b/CPAC/surface/tests/test_installation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023 C-PAC Developers +# Copyright (C) 2023-2025 C-PAC Developers # This file is part of C-PAC. 
@@ -17,18 +17,27 @@ """Tests for requisite surface prerequisites.""" import os +from typing import Literal import pytest from CPAC.utils.tests.test_utils import _installation_check -@pytest.mark.parametrize("executable", ["bc", "csh"]) -@pytest.mark.skipif( - "FREESURFER_HOME" not in os.environ - or not os.path.exists(os.environ["FREESURFER_HOME"]), - reason="We don't need these dependencies if we don't have FreeSurfer.", +@pytest.mark.parametrize( + "executable", + [ + "bc", + pytest.param( + "csh", + marks=pytest.mark.skipif( + "FREESURFER_HOME" not in os.environ + or not os.path.exists(os.environ["FREESURFER_HOME"]), + reason="We don't need this dependency if we don't have FreeSurfer.", + ), + ), + ], ) -def test_executable(executable): +def test_executable(executable: Literal["bc"] | Literal["csh"]) -> None: """Make sure executable is installed.""" _installation_check(executable, "--version") From 1156a214c91d48d301da9a48990cd119a23fab83 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 20 Jun 2025 15:21:33 -0400 Subject: [PATCH 396/507] :white_check_mark: Add direct test for better coverage --- CPAC/utils/tests/test_datasource.py | 35 +++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index c95113a9b1..6e9e52c0d2 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -19,13 +19,13 @@ from dataclasses import dataclass import json from pathlib import Path -from typing import Any +from typing import Any, Literal, TypeAlias from networkx.classes.digraph import DiGraph import pytest from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.utils.datasource import match_epi_fmaps_function_node +from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node from CPAC.utils.test_resources import setup_test_wf from CPAC.utils.utils import PE_DIRECTION @@ -336,6 +336,10 @@ def match_epi_fmaps_inputs( ) +RunType: TypeAlias = Literal["nipype"] | Literal["direct"] +Direction: TypeAlias = Literal["opposite"] | Literal["same"] + + @pytest.mark.parametrize("generate", [True, False]) def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: """Test `~CPAC.utils.datasource.match_epi_fmaps`.""" @@ -352,5 +356,28 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: graph: DiGraph = wf.run() result = list(graph.nodes)[-1].run() - assert Path(result.outputs.opposite_pe_epi).exists() - assert Path(result.outputs.same_pe_epi).exists() + str_outputs: dict[RunType, dict[Direction, str]] = { + "nipype": { + "opposite": result.outputs.opposite_pe_epi, + "same": result.outputs.same_pe_epi, + }, + "direct": {}, + } + path_outputs: dict[RunType, dict[Direction, Path]] = {"nipype": {}, "direct": {}} + str_outputs["direct"]["opposite"], str_outputs["direct"]["same"] = match_epi_fmaps( + data.bold_pedir, + data.epi_fmaps[0][0], + data.epi_fmaps[0][1], + data.epi_fmaps[1][0], + data.epi_fmaps[1][1], + ) + directions: list[Direction] = ["opposite", "same"] + runtypes: list[RunType] = ["nipype", "direct"] + for direction in directions: + for runtype in runtypes: + path_outputs[runtype][direction] = Path(str_outputs[runtype][direction]) + assert path_outputs[runtype][direction].exists() + assert ( + path_outputs["nipype"][direction].name + == path_outputs["direct"][direction].name + ) From 7ae48fee18f718be908db21c6a4b62690c0799b7 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 23 Jun 2025 16:51:19 -0400 Subject: 
[PATCH 397/507] :test_tube: Add (single) smoke test for abcd-options to unit tests --- CPAC/pipeline/cpac_runner.py | 22 +++--- CPAC/pipeline/test/test_connect_pipeline.py | 87 +++++++++++++++++++++ 2 files changed, 99 insertions(+), 10 deletions(-) create mode 100644 CPAC/pipeline/test/test_connect_pipeline.py diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 425eefb91f..0a0f58bbdf 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2024 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -19,10 +19,12 @@ from multiprocessing import Process import os from time import strftime +from typing import Optional import warnings from voluptuous.error import Invalid import yaml +from nipype.pipeline.plugins.base import PluginBase as Plugin from CPAC.longitudinal_pipeline.longitudinal_workflow import anat_longitudinal_wf from CPAC.pipeline.utils import get_shell @@ -257,15 +259,15 @@ def run_T1w_longitudinal(sublist, cfg): def run( # noqa: PLR0915 - subject_list_file, - config_file=None, - p_name=None, - plugin=None, - plugin_args=None, - tracking=True, - num_subs_at_once=None, - debug=False, - test_config=False, + subject_list_file: str, + config_file: Optional[str] = None, + p_name: Optional[str] = None, + plugin: Optional[str | Plugin] = None, + plugin_args: Optional[dict] = None, + tracking: bool = True, + num_subs_at_once: Optional[int] = None, + debug: bool = False, + test_config: bool = False, ) -> int: """Run C-PAC subjects via job queue. diff --git a/CPAC/pipeline/test/test_connect_pipeline.py b/CPAC/pipeline/test/test_connect_pipeline.py new file mode 100644 index 0000000000..52f828e78a --- /dev/null +++ b/CPAC/pipeline/test/test_connect_pipeline.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Test pipeline connections.""" + +from logging import INFO +from pathlib import Path +from typing import Callable + +import pytest +import yaml + +from CPAC.pipeline.cpac_runner import run +from CPAC.utils.configuration import preconfig_yaml +from CPAC.utils.monitoring import log_nodes_cb + + +@pytest.mark.parametrize("preconfig", ["abcd-options"]) +def test_config( + caplog: pytest.LogCaptureFixture, preconfig: str, tmp_path: Path +) -> None: + """Run 'test_config' analysis level.""" + caplog.set_level(INFO) + data_config_file = tmp_path / "data_config.yaml" + with data_config_file.open("w") as _f: + yaml.dump( + [ + { + "anat": "s3://fcp-indi/data/Projects/ADHD200/RawDataBIDS/KKI/sub-1019436/ses-1/anat/sub-1019436_ses-1_run-1_T1w.nii.gz", + "func": { + "rest_acq-1_run-1": { + "scan": "s3://fcp-indi/data/Projects/ADHD200/RawDataBIDS/KKI/sub-1019436/ses-1/func/sub-1019436_ses-1_task-rest_acq-1_run-1_bold.nii.gz", + "scan_parameters": "s3://fcp-indi/data/Projects/ADHD200/RawDataBIDS/KKI/task-rest_acq-1_bold.json", + } + }, + "site": "KKI", + "subject_id": "1019436", + "unique_id": "1", + } + ], + _f, + ) + plugin = "MultiProc" + plugin_args: dict[str, int | bool | Callable] = { + "n_procs": 2, + "memory_gb": 10, + "raise_insufficient": True, + "status_callback": log_nodes_cb, + } + tracking = False + exitcode = run( + str(data_config_file), + preconfig_yaml(preconfig), + plugin=plugin, + plugin_args=plugin_args, + tracking=tracking, + test_config=True, + ) + if exitcode != 0: + records = list(caplog.records) + msg: str + msg = str(records[-1]) + if hasattr(records[-1], "exc_info"): + exc_info = records[-1].exc_info + if ( + exc_info + and exc_info[0] + and exc_info[1] + and hasattr(exc_info[1], "args") + ): + msg = exc_info[1].args[0] + raise exc_info[0](exc_info[1]) + raise AssertionError(msg) From a907528081d5957a05897baaf7c40c4157c247a7 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 23 Jun 2025 17:17:59 -0400 Subject: [PATCH 398/507] :necktie: Transform template-space mask to bold mask --- CPAC/func_preproc/func_preproc.py | 63 +++++++++++++++++++++++++++---- 1 file changed, 56 insertions(+), 7 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 421f1535d7..4277f4e12a 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -16,13 +16,15 @@ # License along with C-PAC. If not, see . 
"""Functional preprocessing.""" +from typing import TYPE_CHECKING + # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position from nipype.interfaces import afni, ants, fsl, utility as util from nipype.interfaces.afni import preprocess, utils as afni_utils from CPAC.func_preproc.utils import get_num_slices, interpolate_slice_timing, nullify from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN, POOL_RESOURCE_DICT from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.ants import ( AI, # niworkflows @@ -31,6 +33,10 @@ ) from CPAC.utils.utils import add_afni_prefix, afni_3dwarp +if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool + from CPAC.utils.configuration import Configuration + def collect_arguments(*args): """Collect arguments.""" @@ -1890,7 +1896,10 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): ["functional_preproc", "run"], ["functional_preproc", "template_space_func_masking", "run"], ], - inputs=[("space-template_desc-preproc_bold", "space-template_desc-bold_mask")], + inputs=[ + ("space-template_desc-preproc_bold", "space-template_desc-bold_mask"), + ("from-template_to-bold_mode-image_xfm", "desc-preproc_bold"), + ], outputs={ "space-template_desc-preproc_bold": { "Description": "The skull-stripped BOLD time-series.", @@ -1904,10 +1913,21 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): "Description": "The non skull-stripped BOLD time-series.", "SkullStripped": False, }, + "space-bold_desc-brain_mask": { + "Description": "Binary brain mask of the BOLD functional time-series, transformed from template space." + }, }, ) -def template_space_bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): +def template_space_bold_masking( + wf: pe.Workflow, + cfg: "Configuration", + strat_pool: "ResourcePool", + pipe_num: int, + opt: None = None, +) -> NODEBLOCK_RETURN: """Mask the bold in template space.""" + from CPAC.registration.registration import apply_transform + func_apply_mask = pe.Node( interface=afni_utils.Calc(), name=f"template_space_func_extract_brain_{pipe_num}", @@ -1921,16 +1941,45 @@ def template_space_bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): ) wf.connect(node_head_bold, out_head_bold, func_apply_mask, "in_file_a") + reg_tool = strat_pool.reg_tool("from-template_to-bold_mode-image_xfm") + num_cpus: int = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] + num_ants_cores: int = cfg.pipeline_setup["system_config"]["num_ants_threads"] + apply_xfm = apply_transform( + f"xfm_from-template_to-bold_mask_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) + if reg_tool == "ants": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] + node, out = strat_pool.get_data("space-template_desc-bold_mask") - wf.connect(node, out, func_apply_mask, "in_file_b") + wf.connect( + [ + (node, func_apply_mask, [(out, "in_file_b")]), + (node, apply_xfm, [(out, "inputspec.input_image")]), + ] + ) + node, out = strat_pool.get_data("desc-preproc_bold") + wf.connect(node, out, apply_xfm, "inputspec.reference") + node, out = 
strat_pool.get_data("from-template_to-bold_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { + outputs: POOL_RESOURCE_DICT = { + "space-bold_desc-brain_mask": (apply_xfm, "outputspec.output_image"), "space-template_desc-preproc_bold": (func_apply_mask, "out_file"), "space-template_desc-brain_bold": (func_apply_mask, "out_file"), "space-template_desc-head_bold": (node_head_bold, out_head_bold), } - return (wf, outputs) + return wf, outputs @nodeblock( From 6c68742b643abd75f80ae903802274548655ffcb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 24 Jun 2025 10:53:08 -0400 Subject: [PATCH 399/507] :white_check_mark: Set paths in pipeline config for smoke test --- CPAC/pipeline/test/test_connect_pipeline.py | 33 +++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/test/test_connect_pipeline.py b/CPAC/pipeline/test/test_connect_pipeline.py index 52f828e78a..6677fe7c2f 100644 --- a/CPAC/pipeline/test/test_connect_pipeline.py +++ b/CPAC/pipeline/test/test_connect_pipeline.py @@ -18,6 +18,7 @@ """Test pipeline connections.""" from logging import INFO +import multiprocessing.resource_tracker from pathlib import Path from typing import Callable @@ -25,9 +26,23 @@ import yaml from CPAC.pipeline.cpac_runner import run -from CPAC.utils.configuration import preconfig_yaml +from CPAC.utils.configuration.configuration import Preconfiguration +from CPAC.utils.configuration.yaml_template import create_yaml_from_template from CPAC.utils.monitoring import log_nodes_cb +_unregister = multiprocessing.resource_tracker.unregister + + +def safe_unregister(name, rtype) -> None: + """Suppress unregister warnings.""" + try: + _unregister(name, rtype) + except KeyError: + pass + + +multiprocessing.resource_tracker.unregister = safe_unregister + @pytest.mark.parametrize("preconfig", ["abcd-options"]) def test_config( @@ -54,6 +69,20 @@ def test_config( ], _f, ) + + # output in tmp_path/outputs + pipeline = Preconfiguration(preconfig) + output_dir = tmp_path / "outputs" + output_dir.mkdir(parents=True, exist_ok=True) + pipeline["pipeline_setup", "log_directory", "path"] = str(output_dir / "log") + pipeline["pipeline_setup", "output_directory", "path"] = str(output_dir / "out") + pipeline["pipeline_setup", "working_directory", "path"] = str( + output_dir / "working" + ) + pipeline_file = tmp_path / "pipe_config.yaml" + with pipeline_file.open("w") as _f: + _f.write(create_yaml_from_template(pipeline, preconfig, preconfig, True)) + plugin = "MultiProc" plugin_args: dict[str, int | bool | Callable] = { "n_procs": 2, @@ -64,7 +93,7 @@ def test_config( tracking = False exitcode = run( str(data_config_file), - preconfig_yaml(preconfig), + str(pipeline_file), plugin=plugin, plugin_args=plugin_args, tracking=tracking, From c945b0cbcbd0a5bce2dee23cc9524e1a8c9642be Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 24 Jun 2025 14:40:05 -0400 Subject: [PATCH 400/507] :necktie: Create `space-bold_desc-brain_mask` at nuisance generation if missing --- CPAC/func_preproc/func_preproc.py | 37 +------------- CPAC/nuisance/nuisance.py | 83 ++++++++++++++++++------------- CPAC/nuisance/utils/xfm.py | 71 ++++++++++++++++++++++++++ CPAC/registration/registration.py | 16 +++--- 4 files changed, 129 insertions(+), 78 deletions(-) create mode 100644 CPAC/nuisance/utils/xfm.py diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 4277f4e12a..d098b6186b 100644 --- a/CPAC/func_preproc/func_preproc.py +++ 
b/CPAC/func_preproc/func_preproc.py @@ -1898,7 +1898,6 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): ], inputs=[ ("space-template_desc-preproc_bold", "space-template_desc-bold_mask"), - ("from-template_to-bold_mode-image_xfm", "desc-preproc_bold"), ], outputs={ "space-template_desc-preproc_bold": { @@ -1913,9 +1912,6 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): "Description": "The non skull-stripped BOLD time-series.", "SkullStripped": False, }, - "space-bold_desc-brain_mask": { - "Description": "Binary brain mask of the BOLD functional time-series, transformed from template space." - }, }, ) def template_space_bold_masking( @@ -1926,8 +1922,6 @@ def template_space_bold_masking( opt: None = None, ) -> NODEBLOCK_RETURN: """Mask the bold in template space.""" - from CPAC.registration.registration import apply_transform - func_apply_mask = pe.Node( interface=afni_utils.Calc(), name=f"template_space_func_extract_brain_{pipe_num}", @@ -1941,39 +1935,10 @@ def template_space_bold_masking( ) wf.connect(node_head_bold, out_head_bold, func_apply_mask, "in_file_a") - reg_tool = strat_pool.reg_tool("from-template_to-bold_mode-image_xfm") - num_cpus: int = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores: int = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform( - f"xfm_from-template_to-bold_mask_{pipe_num}", - reg_tool, - time_series=True, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores, - ) - if reg_tool == "ants": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "functional_registration" - ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] - elif reg_tool == "fsl": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "functional_registration" - ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] - node, out = strat_pool.get_data("space-template_desc-bold_mask") - wf.connect( - [ - (node, func_apply_mask, [(out, "in_file_b")]), - (node, apply_xfm, [(out, "inputspec.input_image")]), - ] - ) - node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = strat_pool.get_data("from-template_to-bold_mode-image_xfm") - wf.connect(node, out, apply_xfm, "inputspec.transform") + wf.connect(node, out, func_apply_mask, "in_file_b") outputs: POOL_RESOURCE_DICT = { - "space-bold_desc-brain_mask": (apply_xfm, "outputspec.output_image"), "space-template_desc-preproc_bold": (func_apply_mask, "out_file"), "space-template_desc-brain_bold": (func_apply_mask, "out_file"), "space-template_desc-head_bold": (node_head_bold, out_head_bold), diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 90d39c18a5..d2605435f8 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -40,8 +40,8 @@ TR_string_to_float, ) from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.engine import ResourcePool -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.engine import NodeData, ResourcePool +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN, POOL_RESOURCE_DICT from CPAC.registration.registration import ( apply_transform, warp_timeseries_to_EPItemplate, @@ -2457,8 +2457,13 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op inputs=[ ( "desc-preproc_bold", - "space-bold_desc-brain_mask", + [ + "space-bold_desc-brain_mask", + "space-template_desc-bold_mask", + 
"space-template_desc-brain_mask", + ], "from-bold_to-T1w_mode-image_desc-linear_xfm", + "from-template_to-bold_mode-image_xfm", "desc-movementParameters_motion", "framewise-displacement-jenkinson", "framewise-displacement-power", @@ -2486,7 +2491,13 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op "lateral-ventricles-mask", "TR", ], - outputs=["desc-confounds_timeseries", "censor-indices"], + outputs={ + "desc-confounds_timeseries": {}, + "censor-indices": {}, + "space-bold_desc-brain_mask": { + "Description": "Binary brain mask of the BOLD functional time-series, transformed from template space." + }, + }, ) def nuisance_regressors_generation_T1w(wf, cfg, strat_pool, pipe_num, opt=None): return nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt, "T1w") @@ -2499,38 +2510,37 @@ def nuisance_regressors_generation( pipe_num: int, opt: dict, space: Literal["T1w", "bold"], -) -> tuple[Workflow, dict]: - """Generate nuisance regressors. +) -> NODEBLOCK_RETURN: + """Generate nuisance regressors.""" + from CPAC.nuisance.utils.xfm import transform_bold_mask_to_native - Parameters - ---------- - wf : ~nipype.pipeline.engine.workflows.Workflow - - cfg : ~CPAC.utils.configuration.Configuration - - strat_pool : ~CPAC.pipeline.engine.ResourcePool - - pipe_num : int - - opt : dict - - space : str - T1w or bold - - Returns - ------- - wf : nipype.pipeline.engine.workflows.Workflow - - outputs : dict - """ prefixes = [f"space-{space}_"] * 2 reg_tool = None + outputs: POOL_RESOURCE_DICT = {} + + brain_mask = ( + strat_pool.node_data("space-bold_desc-brain_mask") + if strat_pool.check_rpool("space-bold_desc-brain_mask") + else NodeData() + ) if space == "T1w": prefixes[0] = "" if strat_pool.check_rpool("from-template_to-T1w_mode-image_desc-linear_xfm"): reg_tool = strat_pool.reg_tool( "from-template_to-T1w_mode-image_desc-linear_xfm" ) + if brain_mask.node is NotImplemented: + if reg_tool and strat_pool.check_rpool( + ["space-template_desc-bold_mask", "space-template_desc-brain_mask"] + ): + outputs["space-bold_desc-brain_mask"] = ( + transform_bold_mask_to_native( + wf, strat_pool, cfg, pipe_num, reg_tool + ) + ) + brain_mask.node, brain_mask.out = outputs[ + "space-bold_desc-brain_mask" + ] elif space == "bold": reg_tool = strat_pool.reg_tool( "from-EPItemplate_to-bold_mode-image_desc-linear_xfm" @@ -2575,8 +2585,12 @@ def nuisance_regressors_generation( node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, regressors, "inputspec.functional_file_path") - node, out = strat_pool.get_data("space-bold_desc-brain_mask") - wf.connect(node, out, regressors, "inputspec.functional_brain_mask_file_path") + wf.connect( + brain_mask.node, + brain_mask.out, + regressors, + "inputspec.functional_brain_mask_file_path", + ) if strat_pool.check_rpool(f"desc-brain_{space}"): node, out = strat_pool.get_data(f"desc-brain_{space}") @@ -2738,12 +2752,13 @@ def nuisance_regressors_generation( node, out = strat_pool.get_data("TR") wf.connect(node, out, regressors, "inputspec.tr") - outputs = { - "desc-confounds_timeseries": (regressors, "outputspec.regressors_file_path"), - "censor-indices": (regressors, "outputspec.censor_indices"), - } + outputs["desc-confounds_timeseries"] = ( + regressors, + "outputspec.regressors_file_path", + ) + outputs["censor-indices"] = (regressors, "outputspec.censor_indices") - return (wf, outputs) + return wf, outputs def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt, space, res=None): diff --git 
a/CPAC/nuisance/utils/xfm.py b/CPAC/nuisance/utils/xfm.py new file mode 100644 index 0000000000..60c6a2c133 --- /dev/null +++ b/CPAC/nuisance/utils/xfm.py @@ -0,0 +1,71 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Transformation utilities for nuisance regression.""" + +from typing import cast, Literal + +from nipype.pipeline.engine import Workflow + +from CPAC.pipeline.engine import ResourcePool +from CPAC.registration.registration import apply_transform +from CPAC.utils.configuration import Configuration + +# ("from-template_to-bold_mode-image_xfm", "desc-preproc_bold"), + + +def transform_bold_mask_to_native( + wf: Workflow, + strat_pool: ResourcePool, + cfg: Configuration, + pipe_num: int, + reg_tool: Literal["ants", "fsl"], +) -> tuple[Workflow, str]: + """Transform a template-space BOLD mask to native space.""" + num_cpus = cast( + int, cfg["pipeline_setup", "system_config", "max_cores_per_participant"] + ) + num_ants_cores = cast( + int, cfg["pipeline_setup", "system_config", "num_ants_threads"] + ) + apply_xfm = apply_transform( + f"xfm_from-template_to-bold_mask_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) + apply_xfm.inputs.inputspec.interpolation = cfg[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + f"{'ANTs' if reg_tool == 'ants' else 'FNIRT'}_pipelines", + "interpolation", + ] + bold = strat_pool.node_data("desc-preproc_bold") + bold_mask = strat_pool.node_data( + ["space-template_desc-bold_mask", "space-template_desc-brain_mask"] + ) + xfm = strat_pool.node_data("from-template_to-bold_mode-image_xfm") + wf.connect( + [ + (bold_mask.node, apply_xfm, [(bold_mask.out, "inputspec.input_image")]), + (bold.node, apply_xfm, [(bold.out, "inputspec.reference")]), + (xfm.node, apply_xfm, [(xfm.out, "inputspec.transform")]), + ] + ) + + return apply_xfm, "outputspec.output_image" diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8d97951283..d001b00550 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -49,13 +49,13 @@ def apply_transform( - wf_name, - reg_tool, - time_series=False, - multi_input=False, - num_cpus=1, - num_ants_cores=1, -): + wf_name: str, + reg_tool: Literal["ants", "fsl"], + time_series: bool = False, + multi_input: bool = False, + num_cpus: int = 1, + num_ants_cores: int = 1, +) -> pe.Workflow: """Apply transform.""" if not reg_tool: msg = ( @@ -101,7 +101,7 @@ def apply_transform( ) apply_warp.inputs.dimension = 3 - apply_warp.interface.num_threads = int(num_ants_cores) + apply_warp.inputs.num_threads = int(num_ants_cores) if time_series: apply_warp.inputs.input_image_type = 3 From 85869fc3174766ff0dab9b2f15b49d13c0832fbb Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 24 Jun 2025 15:16:45 -0400 Subject: [PATCH 
401/507] desc-ABCDpreproc_t1w added to outputs --- CPAC/resources/cpac_outputs.tsv | 1 + 1 file changed, 1 insertion(+) diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index d5a61b18bb..d7b5661279 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -163,6 +163,7 @@ desc-preproc_T1w T1w T1w anat NIfTI desc-reorient_T1w T1w T1w anat NIfTI Yes desc-restore_T1w T1w T1w anat NIfTI desc-restore-brain_T1w T1w T1w anat NIfTI +desc-ABCDpreproc_T1w T1w T1w anat NIfTI space-template_desc-brain_T1w T1w template anat NIfTI Yes space-template_desc-preproc_T1w T1w template anat NIfTI space-template_desc-head_T1w T1w template anat NIfTI From ea47c4dc240597fcf8aec054d38e1d1983e329a0 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 24 Jun 2025 15:18:11 -0400 Subject: [PATCH 402/507] adding to changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3a965b2c3..d8835a805a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Switch `sink_native_transforms` under `registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. - `deoblique` field in pipeline config with `warp` and `refit` options to apply `3dWarp` or `3drefit` during data initialization. - `organism` configuration option. +- `desc-ABCDpreproc_T1w` to the outputs ### Changed From de6a6d4ddfad92893301938bc08017a4da23b318 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 25 Jun 2025 11:30:31 -0400 Subject: [PATCH 403/507] implementing importlib.resources --- CPAC/utils/datasource.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 25adb1eeca..1d951665c6 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -890,6 +890,8 @@ def check_for_s3( import botocore.exceptions import nibabel as nib from indi_aws import fetch_creds + import importlib.resources as resources + from CPAC.resources import templates # Init variables s3_str = "s3://" @@ -974,12 +976,7 @@ def check_for_s3( if not os.path.exists(local_path): # alert users to 2020-07-20 Neuroparc atlas update (v0 to v1) ndmg_atlases = {} - with open( - os.path.join( - os.path.dirname(os.path.dirname(__file__)), - "resources/templates/ndmg_atlases.csv", - ) - ) as ndmg_atlases_file: + with resources.files(templates).joinpath("ndmg_atlases.csv").open("r") as ndmg_atlases_file: ndmg_atlases["v0"], ndmg_atlases["v1"] = zip( *[ ( From 50f0f6219aceeb1e92451b99801721d73a84d922 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 13:32:10 -0400 Subject: [PATCH 404/507] :necktie: Include `desc-reorient_bold` in nuisiance generation --- CPAC/nuisance/nuisance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index d2605435f8..4dbc4cf3cc 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2456,7 +2456,7 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op option_val="USER-DEFINED", inputs=[ ( - "desc-preproc_bold", + ["desc-preproc_bold", "desc-reorient_bold"], [ "space-bold_desc-brain_mask", "space-template_desc-bold_mask", From 7020a1b5559eeb16901802ffea5b2e114a3317fc Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 13:44:24 -0400 Subject: [PATCH 405/507] :pencil2: 
:wrench: Fix path to `MNI152_T1_1mm_first_brain_mask` Co-authored-by: Biraj Shrestha <111654544+birajstha@users.noreply.github.com> --- .../resources/configs/pipeline_config_abcd-options.yml | 8 +------- CPAC/resources/configs/pipeline_config_abcd-prep.yml | 5 ----- CPAC/resources/configs/pipeline_config_blank.yml | 10 +++++----- CPAC/resources/configs/pipeline_config_ccs-options.yml | 4 ---- CPAC/resources/configs/pipeline_config_default.yml | 2 +- 5 files changed, 7 insertions(+), 22 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 844def45a0..3e60071578 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -223,8 +223,6 @@ registration_workflows: # Choose coregistration degree of freedom dof: 12 - - func_registration_to_template: # these options modify the application (to the functional data), not the calculation, of the @@ -290,13 +288,8 @@ functional_preproc: # Blip-FSL-TOPUP - Uses FSL TOPUP to calculate the distortion unwarp for EPI field maps of opposite/same phase encoding direction. using: [PhaseDiff, Blip-FSL-TOPUP] - func_masking: - run: Off - template_space_func_masking: run: On - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") - using: [Anatomical_Resampled] generate_func_mean: @@ -310,6 +303,7 @@ functional_preproc: nuisance_corrections: 2-nuisance_regression: + # Select which nuisance signal corrections to apply Regressors: - Name: default diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 05153eb771..27a4cd5f63 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -223,10 +223,5 @@ functional_preproc: # Blip-FSL-TOPUP - Uses FSL TOPUP to calculate the distortion unwarp for EPI field maps of opposite/same phase encoding direction. using: [] - func_masking: - run: Off - template_space_func_masking: run: On - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") - using: [Anatomical_Resampled] diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index fd1686a21f..459746ebf1 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -258,8 +258,7 @@ anatomical_preproc: # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header. 
# applies for both T1w and T2w images - deoblique: ["refit"] - + deoblique: [refit] acpc_alignment: T1w_brain_ACPC_template: @@ -297,7 +296,7 @@ anatomical_preproc: FreeSurfer-BET: # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline - T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz + T1w_brain_template_mask_ccs: /code/CPAC/resources/templates/MNI152_T1_1mm_first_brain_mask.nii.gz # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] # this is a fork option @@ -967,7 +966,7 @@ functional_preproc: # [warp] - Deoblique the input image using AFNI 3dWarp. Changes header and the image data. Applies interpolation to the slice-timing metadata. # [refit] - Clear the header of the input image using AFNI 3drefit. Changes only the header. - deoblique: ["refit"] + deoblique: [refit] slice_timing_correction: @@ -1161,7 +1160,8 @@ functional_preproc: template_space_func_masking: run: Off - # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") + + # Anatomical_Resampled: Resample anatomical brain mask in standard space to get BOLD brain mask in standard space. Adapted from DCAN Lab's BOLD mask method from the ABCD pipeline. ("Create fMRI resolution standard space files for T1w image, wmparc, and brain mask […] don't use FLIRT to do spline interpolation with -applyisoxfm for the 2mm and 1mm cases because it doesn't know the peculiarities of the MNI template FOVs") using: [Anatomical_Resampled] generate_func_mean: diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 7e670484dc..1a4d59c7eb 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -61,10 +61,6 @@ anatomical_preproc: brain_extraction: run: On - FreeSurfer-BET: - - # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline - T1w_brain_template_mask_ccs: /code/CPAC/resources/templates/MNI152_T1_1mm_first_brain_mask.nii.gz # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] # this is a fork option diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index cba22d6a08..77e8231829 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -490,7 +490,7 @@ anatomical_preproc: FreeSurfer-BET: # Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline - T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz + T1w_brain_template_mask_ccs: /code/CPAC/resources/templates/MNI152_T1_1mm_first_brain_mask.nii.gz restore_t1w_intensity: run: Off From 0452046fb566cf7da70a75b737fba1e53e64c31c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 15:08:14 -0400 Subject: [PATCH 406/507] :recycle: Use `importlib.resources.files` instead of `__file__` Co-authored-by: Biraj Shrestha 
<111654544+birajstha@users.noreply.github.com> --- CPAC/cwas/tests/test_mdmr_cython.py | 37 ++++++++++++++----- CPAC/info.py | 22 ++++------- .../tests/test_network_centrality.py | 10 +++-- CPAC/resources/__init__.py | 17 +++++++++ CPAC/resources/configs/__init__.py | 17 +++++++++ CPAC/resources/templates/lookup_table.py | 7 ++-- CPAC/utils/datasource.py | 9 +++-- CPAC/utils/interfaces/conftest.py | 11 +++--- CPAC/utils/utils.py | 25 +++++-------- 9 files changed, 102 insertions(+), 53 deletions(-) create mode 100644 CPAC/resources/configs/__init__.py diff --git a/CPAC/cwas/tests/test_mdmr_cython.py b/CPAC/cwas/tests/test_mdmr_cython.py index 16415f9720..cb1f6fea2b 100644 --- a/CPAC/cwas/tests/test_mdmr_cython.py +++ b/CPAC/cwas/tests/test_mdmr_cython.py @@ -1,19 +1,36 @@ -import os +# Copyright (C) 2018-2025 C-PAC Developers -import pytest +# This file is part of C-PAC. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -@pytest.mark.skip(reason="possibly deprecated") -def test_mdmr(): - import numpy as np +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. - from CPAC.cwas.cwas import calc_cwas +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""MDMR Cython tests.""" - X = np.genfromtxt(os.path.join(os.path.dirname(__file__), "X.csv"), delimiter=",") - Y = np.genfromtxt(os.path.join(os.path.dirname(__file__), "Y.csv"), delimiter=",") +from importlib.resources import as_file, files - X = X.reshape((X.shape[0], X.shape[1], 1)) +import numpy as np +import pytest + +from CPAC.cwas.cwas import calc_cwas - F_value, p_value = calc_cwas(X, Y, np.array([0, 1, 2], dtype=int), 1000, [0]) +@pytest.mark.skip(reason="possibly deprecated") +def test_mdmr() -> None: + with as_file(files("CPAC").joinpath("cwas/tests")) as _f: + X = np.genfromtxt(_f / "X.csv", delimiter=",") + Y = np.genfromtxt(_f / "Y.csv", delimiter=",") + + X = X.reshape((X.shape[0], X.shape[1], 1)) + + _F_value, p_value = calc_cwas(X, Y, np.array([0, 1, 2], dtype=int), 1000, [0]) assert np.isclose(p_value.mean(), 1.0, rtol=0.1) diff --git a/CPAC/info.py b/CPAC/info.py index d776ad9971..9f19ee2f00 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -29,7 +29,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -# Modifications Copyright (C) 2022-2023 C-PAC Developers +# Modifications Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. """Metadata for building C-PAC. @@ -50,22 +50,16 @@ _version_extra = "dev1" -def get_cpac_gitversion(): - """CPAC version as reported by the last commit in git. - - Returns - ------- - None or str - - Version of C-PAC according to git. 
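The access pattern these commits converge on is worth spelling out once. A minimal sketch, assuming only the stdlib `importlib.resources` API (the `CPAC` package and the `cwas/tests`/`X.csv` names are taken from the diff above):

    from importlib.resources import as_file, files

    # files() returns a Traversable rooted at the installed package;
    # as_file() guarantees a real filesystem path while the block is open.
    with as_file(files("CPAC").joinpath("cwas/tests")) as test_dir:
        x_csv = test_dir / "X.csv"  # read the data inside the context

Unlike `os.path.dirname(__file__)` arithmetic, this stays correct even when the package is imported from a zip or another non-filesystem loader.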
- """ - import os +def get_cpac_gitversion() -> str | None: + """CPAC version as reported by the last commit in git.""" + from importlib.resources import as_file, files import subprocess - gitpath = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.pardir)) + with as_file(files("CPAC")) as _cpac: + gitpath = _cpac.parent - gitpathgit = os.path.join(gitpath, ".git") - if not os.path.exists(gitpathgit): + gitpathgit = gitpath / ".git" + if not gitpathgit.exists(): return None ver = None diff --git a/CPAC/network_centrality/tests/test_network_centrality.py b/CPAC/network_centrality/tests/test_network_centrality.py index bca7ccb096..244b1cbba8 100644 --- a/CPAC/network_centrality/tests/test_network_centrality.py +++ b/CPAC/network_centrality/tests/test_network_centrality.py @@ -1,4 +1,4 @@ -# Copyright (C) 2015-2024 C-PAC Developers +# Copyright (C) 2015-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,6 +14,9 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Tests for network centrality.""" + +from importlib.resources import as_file, files from itertools import combinations from pathlib import Path @@ -23,8 +26,9 @@ from CPAC.pipeline.schema import valid_options from CPAC.utils.interfaces.afni import AFNI_SEMVER -_DATA_DIR = Path(__file__).parent / "data" -"""Path to test data directory""" +with as_file(files("CPAC").joinpath("network_centrality/tests/data")) as _data: + _DATA_DIR = _data + """Path to test data directory""" @pytest.mark.parametrize("method_option", valid_options["centrality"]["method_options"]) diff --git a/CPAC/resources/__init__.py b/CPAC/resources/__init__.py index e69de29bb2..befaed1aa1 100644 --- a/CPAC/resources/__init__.py +++ b/CPAC/resources/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2013-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Resources for C-PAC.""" diff --git a/CPAC/resources/configs/__init__.py b/CPAC/resources/configs/__init__.py new file mode 100644 index 0000000000..c312d7ab80 --- /dev/null +++ b/CPAC/resources/configs/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2017-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Configurations for C-PAC.""" diff --git a/CPAC/resources/templates/lookup_table.py b/CPAC/resources/templates/lookup_table.py index 4314e0b4d7..d216bafdba 100644 --- a/CPAC/resources/templates/lookup_table.py +++ b/CPAC/resources/templates/lookup_table.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2024 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -19,7 +19,8 @@ See `Standard template identifiers `_. """ -from os import environ, path as op +from importlib.resources import files +from os import environ from re import findall, search from typing import Optional @@ -32,7 +33,7 @@ str(row[2]) if row[2] else None, ) for row in loadtxt( - op.join(op.dirname(__file__), "BIDS_identifiers.tsv"), + str(files("CPAC").joinpath("resources/templates/BIDS_identifiers.tsv")), dtype="str", delimiter="\t", ) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 1d951665c6..eb6619812d 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2024 C-PAC Developers +# Copyright (C) 2012-2025 C-PAC Developers # This file is part of C-PAC. @@ -885,12 +885,13 @@ def check_for_s3( ): """Check if passed-in file is on S3.""" # Import packages + from importlib.resources import files import os import botocore.exceptions import nibabel as nib from indi_aws import fetch_creds - import importlib.resources as resources + from CPAC.resources import templates # Init variables @@ -976,7 +977,9 @@ def check_for_s3( if not os.path.exists(local_path): # alert users to 2020-07-20 Neuroparc atlas update (v0 to v1) ndmg_atlases = {} - with resources.files(templates).joinpath("ndmg_atlases.csv").open("r") as ndmg_atlases_file: + with ( + files(templates).joinpath("ndmg_atlases.csv").open("r") as ndmg_atlases_file + ): ndmg_atlases["v0"], ndmg_atlases["v1"] = zip( *[ ( diff --git a/CPAC/utils/interfaces/conftest.py b/CPAC/utils/interfaces/conftest.py index bcf92c7dfc..bff7d64abe 100644 --- a/CPAC/utils/interfaces/conftest.py +++ b/CPAC/utils/interfaces/conftest.py @@ -35,18 +35,19 @@ """ from contextlib import contextmanager +from importlib.resources import as_file, files from os import chdir, getcwd from pathlib import Path from shutil import copytree, rmtree from tempfile import mkdtemp from pytest import fixture -import nipype -NIPYPE_DATADIR = Path(nipype.__file__).parent / "testing/data" -TEMP_FOLDER = Path(mkdtemp()) -DATA_DIR = TEMP_FOLDER / "data" -copytree(NIPYPE_DATADIR, DATA_DIR, symlinks=True) +with as_file(files("nipype").joinpath("testing/data")) as data_path: + NIPYPE_DATADIR = data_path + TEMP_FOLDER = Path(mkdtemp()) + DATA_DIR = TEMP_FOLDER / "data" + copytree(NIPYPE_DATADIR, DATA_DIR, symlinks=True) @contextmanager diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index d86f0b2f2f..5ab39cf66c 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -20,7 +20,7 @@ from copy import deepcopy import fnmatch import gzip -from itertools import repeat +from importlib.resources import files import json import numbers import os @@ -31,21 +31,16 @@ from voluptuous.error import Invalid import yaml +from CPAC.resources import configs from CPAC.utils.configuration import Configuration from CPAC.utils.docs import deprecated from CPAC.utils.interfaces.function import Function from CPAC.utils.monitoring import FMLOGGER, WFLOGGER -CONFIGS_DIR = os.path.abspath( - os.path.join(__file__, *repeat(os.path.pardir, 2), "resources/configs/") -) -with open( - os.path.join(CONFIGS_DIR, "1.7-1.8-nesting-mappings.yml"), 
"r", encoding="utf-8" -) as _f: +CONFIGS_DIR = files(configs) +with (CONFIGS_DIR / "1.7-1.8-nesting-mappings.yml").open("r", encoding="utf-8") as _f: NESTED_CONFIG_MAPPING = yaml.safe_load(_f) -with open( - os.path.join(CONFIGS_DIR, "1.7-1.8-deprecations.yml"), "r", encoding="utf-8" -) as _f: +with (CONFIGS_DIR / "1.7-1.8-deprecations.yml").open("r", encoding="utf-8") as _f: NESTED_CONFIG_DEPRECATIONS = yaml.safe_load(_f) PE_DIRECTION = Literal["i", "i-", "j", "j-", "k", "k-", ""] VALID_PATTERNS = [ @@ -1147,10 +1142,10 @@ def create_log(wf_name="log", scan_id=None): def find_files(directory, pattern): """Find files in directory.""" - for root, dirs, files in os.walk(directory): - for basename in files: + for _root, _dirs, _files in os.walk(directory): + for basename in _files: if fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) + filename = os.path.join(_root, basename) yield filename @@ -1444,8 +1439,8 @@ def repickle(directory): # noqa: T20 ------- None """ - for root, _, files in os.walk(directory, followlinks=True): - for fn in files: + for root, _, _files in os.walk(directory, followlinks=True): + for fn in _files: p = os.path.join(root, fn) if fn.endswith(".pkl"): if _pickle2(p): From 1c7ab57c4ab64b4aabc11d5efb8cae46376de567 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 17:06:48 -0400 Subject: [PATCH 407/507] :recycle: Use `importlib.resources.files` instead of `pkg_resources` --- .ruff.toml | 1 - CPAC/__main__.py | 129 ++++++++-------------- CPAC/_entrypoints/run.py | 11 +- CPAC/anat_preproc/ants.py | 31 +++--- CPAC/pipeline/__init__.py | 20 ++-- CPAC/pipeline/cpac_runner.py | 9 +- CPAC/pipeline/test/test_cpac_runner.py | 37 +++++-- CPAC/qc/pipeline.py | 5 +- CPAC/qc/utils.py | 115 +++++++++---------- CPAC/surface/tests/test_config.py | 32 ++++-- CPAC/utils/build_data_config.py | 15 +-- CPAC/utils/configuration/configuration.py | 14 ++- CPAC/utils/symlinks.py | 19 ++++ CPAC/utils/tests/test_symlinks.py | 33 +++--- CPAC/utils/tests/test_trimmer.py | 31 ++++-- 15 files changed, 253 insertions(+), 249 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 265427a1ab..1dda55a299 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -20,7 +20,6 @@ external = ["T20"] # Don't autoremove 'noqa` comments for these rules "nibabel" = "nib" "nipype.interfaces.io" = "nio" "networkx" = "nx" -"pkg_resources" = "p" "CPAC.pipeline.nipype_pipeline_engine" = "pe" [lint.isort] diff --git a/CPAC/__main__.py b/CPAC/__main__.py index 0b088c67f2..a33598ce8a 100644 --- a/CPAC/__main__.py +++ b/CPAC/__main__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (C) 2018-2024 C-PAC Developers +# Copyright (C) 2018-2025 C-PAC Developers # This file is part of C-PAC. @@ -15,11 +15,13 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
+"""C-PAC CLI.""" + +from importlib.resources import as_file, files import os import click from click_aliases import ClickAliasedGroup -import pkg_resources as p from CPAC.utils.docs import version_report from CPAC.utils.monitoring.custom_logging import getLogger @@ -69,71 +71,45 @@ def version(): ) +def _config_path(filename: str) -> str: + """Given a base filename, return full config path.""" + with as_file(files("CPAC").joinpath("resources/configs")) as configs: + return str(configs / f"{filename}") + + @main.command() @click.argument("data_config") @click.option("--pipe-config", "--pipe_config") @click.option("--num-cores", "--num_cores") @click.option("--ndmg-mode", "--ndmg_mode", is_flag=True) @click.option("--debug", is_flag=True) -def run(data_config, pipe_config=None, num_cores=None, ndmg_mode=False, debug=False): - if not pipe_config: - pipe_config = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "pipeline_config_template.yml") - ) - - if pipe_config == "benchmark-ants": - pipe_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "pipeline_config_benchmark-ANTS.yml"), - ) - - if pipe_config == "benchmark-fnirt": - pipe_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "pipeline_config_benchmark-FNIRT.yml"), - ) - - if pipe_config == "anat-only": - pipe_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "pipeline_config_anat-only.yml"), - ) - - if data_config == "benchmark-data": - data_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "data_config_cpac_benchmark.yml"), - ) - - if data_config == "ADHD200": - data_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "data_config_S3-BIDS-ADHD200.yml"), - ) - if data_config == "ADHD200_2": - data_config = p.resource_filename( - "CPAC", - os.path.join( - "resources", "configs", "data_config_S3-BIDS-ADHD200_only2.yml" - ), - ) - if data_config == "ABIDE": - data_config = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "data_config_S3-BIDS-ABIDE.yml"), - ) - if data_config == "NKI-RS": - data_config = p.resource_filename( - "CPAC", - os.path.join( - "resources", "configs", "data_config_S3-BIDS-NKI-RocklandSample.yml" - ), - ) - +def run( + data_config, pipe_config=None, num_cores=None, ndmg_mode=False, debug=False +) -> None: + """Run C-PAC.""" if ndmg_mode: - pipe_config = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "pipeline_config_ndmg.yml") - ) + pipe_config = _config_path("pipeline_config_ndmg") + else: + match pipe_config: + case None: + pipe_config = _config_path("pipeline_config_template") + case "benchmark-ants": + pipe_config = _config_path("pipeline_config_benchmark-ANTS") + case "benchmark-fnirt": + pipe_config = _config_path("pipeline_config_benchmark-FNIRT") + case "anat-only": + pipe_config = _config_path("pipeline_config_anat-only") + match data_config: + case "benchmark-data": + data_config = _config_path("data_config_cpac_benchmark") + case "ADHD200": + data_config = _config_path("data_config_S3-BIDS-ADHD200") + case "ADHD200_2": + data_config = _config_path("data_config_S3-BIDS-ADHD200_only2") + case "ABIDE": + data_config = _config_path("data_config_S3-BIDS-ABIDE") + case "NKI-RS": + data_config = _config_path("data_config_S3-BIDS-NKI-RocklandSample") from CPAC.pipeline import cpac_runner @@ -565,36 +541,17 @@ def test(): def run_suite(show_list: bool | str = False, pipeline_filter=""): from CPAC.pipeline import 
cpac_runner - test_config_dir = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "test_configs") - ) - - data_test = p.resource_filename( - "CPAC", - os.path.join( - "resources", "configs", "test_configs", "data-test_S3-ADHD200_1.yml" - ), - ) - - data_test_no_scan_param = p.resource_filename( - "CPAC", - os.path.join( - "resources", "configs", "test_configs", "data-test_S3-ADHD200_no-params.yml" - ), - ) - - data_test_fmap = p.resource_filename( - "CPAC", - os.path.join( - "resources", "configs", "test_configs", "data-test_S3-NKI-RS_fmap.yml" - ), - ) + with as_file(files("CPAC").joinpath("resources/configs")) as configs: + test_config_dir = configs / "test_configs" + data_test = test_config_dir / "data-test_S3-ADHD200_1" + data_test_no_scan_param = test_config_dir / "data-test_S3-ADHD200_no-params" + data_test_fmap = test_config_dir / "data-test_S3-NKI-RS_fmap" if show_list: show_list = "\nAvailables pipelines:" no_params = False - for config_file in os.listdir(test_config_dir): + for config_file in [str(_) for _ in test_config_dir.iterdir()]: if config_file.startswith("pipe-test_"): if pipeline_filter not in config_file: continue @@ -603,7 +560,7 @@ def run_suite(show_list: bool | str = False, pipeline_filter=""): show_list += f"\n- {config_file[len('pipe-test_'):]}" continue - pipe = os.path.join(test_config_dir, config_file) + pipe = str(test_config_dir / config_file) if "DistCorr" in pipe: data = data_test_fmap diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index 1a01489da8..f0a83a4f3e 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -482,14 +482,13 @@ def run_main(): elif args.analysis_level == "group": if not args.group_file or not os.path.exists(args.group_file): - import pkg_resources as p + from importlib.resources import as_file, files WFLOGGER.warning("\nNo group analysis configuration file was supplied.\n") - - args.group_file = p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "group_config_template.yml"), - ) + with as_file( + files("CPAC").joinpath("resources/configs/group_config_template.yml") + ) as _f: + args.group_file = str(_f) output_group = os.path.join(output_dir, "group_config.yml") diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index cfa771ea55..3a3cb1988b 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -15,6 +15,7 @@ # * Docstrings updated accordingly # * Style modifications # * Removed comments from import blocks +# * Updated to `importlib.resources` from `pkg_resources` # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright 2020 The NiPreps Developers @@ -30,7 +31,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Modifications copyright (C) 2019 - 2024 C-PAC Developers +# Modifications copyright (C) 2019 - 2025 C-PAC Developers # This file is part of C-PAC. """Nipype translation of ANTs workflows. 
@@ -42,10 +43,11 @@
 """
 
 from collections import OrderedDict
+from importlib.resources import as_file, files
 from logging import getLogger
+from typing import Literal
 
 from packaging.version import parse as parseversion, Version
-from pkg_resources import resource_filename as pkgr_fn
 from nipype.interfaces import utility as niu
 from nipype.interfaces.ants import Atropos, MultiplyImages, N4BiasFieldCorrection
 from nipype.interfaces.fsl.maths import ApplyMask
@@ -98,7 +100,7 @@ def init_brain_extraction_wf(  # noqa: PLR0913
     name="brain_extraction_wf",
     template_spec=None,
     use_float=True,
-    normalization_quality="precise",
+    normalization_quality: Literal["precise", "testing"] = "precise",
     omp_nthreads=None,
     mem_gb=3.0,
     bids_suffix="T1w",
@@ -298,17 +300,18 @@ def init_brain_extraction_wf(  # noqa: PLR0913
         if use_laplacian
         else "antsBrainExtractionNoLaplacian_%s.json"
     )
-    norm = pe.Node(
-        Registration(
-            from_file=pkgr_fn(
-                "CPAC.anat_preproc", "data/" + settings_file % normalization_quality
-            )
-        ),
-        name="norm",
-        n_procs=omp_nthreads,
-        mem_gb=1.7,
-        mem_x=(1233286593342025 / 151115727451828646838272, "moving_image"),
-    )
+    with as_file(
+        files("CPAC").joinpath(
+            f"anat_preproc/data/{settings_file % normalization_quality}"
+        )
+    ) as _f:
+        norm = pe.Node(
+            Registration(from_file=str(_f)),
+            name="norm",
+            n_procs=omp_nthreads,
+            mem_gb=1.7,
+            mem_x=(1233286593342025 / 151115727451828646838272, "moving_image"),
+        )
     norm.inputs.float = use_float
     fixed_mask_trait = "fixed_image_mask"
     if _ants_version and parseversion(_ants_version) >= Version("2.2.0"):
diff --git a/CPAC/pipeline/__init__.py b/CPAC/pipeline/__init__.py
index 6002aa8b97..537602abc3 100644
--- a/CPAC/pipeline/__init__.py
+++ b/CPAC/pipeline/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022 - 2024 C-PAC Developers
+# Copyright (C) 2022 - 2025 C-PAC Developers
 
 # This file is part of C-PAC.
 
@@ -16,22 +16,18 @@
 # License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""The C-PAC pipeline and its underlying infrastructure.""" -import os - -import pkg_resources as p +from importlib.resources import as_file, files from CPAC.pipeline.nipype_pipeline_engine.monkeypatch import patch_base_interface patch_base_interface() # Monkeypatch Nipypes BaseInterface class -ALL_PIPELINE_CONFIGS = os.listdir( - p.resource_filename("CPAC", os.path.join("resources", "configs")) -) -ALL_PIPELINE_CONFIGS = [ - x.split("_")[2].replace(".yml", "") - for x in ALL_PIPELINE_CONFIGS - if "pipeline_config" in x -] +with as_file(files("CPAC").joinpath("resources/configs")) as _f: + ALL_PIPELINE_CONFIGS = [ + x.split("_")[2].replace(".yml", "") + for x in [str(_) for _ in _f.iterdir()] + if "pipeline_config" in x + ] ALL_PIPELINE_CONFIGS.sort() AVAILABLE_PIPELINE_CONFIGS = [ preconfig diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 425eefb91f..8c2b37b0f4 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -288,11 +288,12 @@ def run( # noqa: PLR0915 plugin_args = {"status_callback": log_nodes_cb} if not config_file: - import pkg_resources as p + from importlib.resources import as_file, files - config_file = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "pipeline_config_template.yml") - ) + with as_file( + files("CPAC").joinpath("resources/configs/pipeline_config_template.yml") + ) as _f: + config_file = str(_f) # Init variables sublist = None diff --git a/CPAC/pipeline/test/test_cpac_runner.py b/CPAC/pipeline/test/test_cpac_runner.py index 1e43a3e3b6..c41d798389 100644 --- a/CPAC/pipeline/test/test_cpac_runner.py +++ b/CPAC/pipeline/test/test_cpac_runner.py @@ -1,7 +1,25 @@ +# Copyright (C) 2021-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Run C-PAC in a container.""" + +from importlib.resources import as_file, files import os from pathlib import Path -import pkg_resources as p import pytest from CPAC.pipeline.cpac_pipeline import load_cpac_pipe_config @@ -21,7 +39,7 @@ def test_shell() -> None: @pytest.mark.skip(reason="not a pytest test") def test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id): sub_data_list = create_cpac_data_config( - bids_dir, participant_label=part_id, skip_bids_validator=True + bids_dir, participant_labels=[part_id], skip_bids_validator=True ) cfg = load_cpac_pipe_config(cfg) @@ -31,12 +49,11 @@ def test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id): run_T1w_longitudinal(sub_data_list, cfg) -cfg = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "pipeline_config_default.yml") -) -bids_dir = "/Users/steven.giavasis/data/neurodata_hnu" -test_dir = "/test_dir" -part_id = "0025427" - if __name__ == "__main__": - test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id) + bids_dir = "/Users/steven.giavasis/data/neurodata_hnu" + test_dir = "/test_dir" + part_id = "0025427" + with as_file( + files("CPAC").joinpath("resources/configs/pipeline_config_default.yml") + ) as cfg: + test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id) diff --git a/CPAC/qc/pipeline.py b/CPAC/qc/pipeline.py index 2eb44ed4f2..ed3326e51f 100644 --- a/CPAC/qc/pipeline.py +++ b/CPAC/qc/pipeline.py @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . """C-PAC quality control pipeline.""" -import pkg_resources as p +from importlib.resources import as_file, files from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock @@ -38,7 +38,8 @@ # register color palettes palletes = ["red", "green", "blue", "red_to_blue", "cyan_to_yellow"] for pallete in palletes: - register_pallete(p.resource_filename("CPAC", "qc/colors/%s.txt" % pallete), pallete) + with as_file(files("CPAC").joinpath(f"qc/colors/{pallete}.txt")) as _pallete: + register_pallete(str(_pallete), pallete) @nodeblock( diff --git a/CPAC/qc/utils.py b/CPAC/qc/utils.py index 5e04296b00..4a60d756f0 100644 --- a/CPAC/qc/utils.py +++ b/CPAC/qc/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013-2024 C-PAC Developers +# Copyright (C) 2013-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,14 +14,15 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +"""Quality control utilities for C-PAC.""" + +from importlib.resources import as_file, files import os import subprocess import matplotlib as mpl -import numpy import numpy as np from numpy import ma -import pkg_resources as p import nibabel as nib from CPAC.utils.monitoring import IFLOGGER @@ -32,8 +33,7 @@ def generate_qc_pages(qc_dir): - """Generates the QC HTML files populated with the QC images that were - created during the CPAC pipeline run. + """Generate the QC HTML files populated with the QC images that were created during the CPAC pipeline run. This function runs after the pipeline is over. 
@@ -59,18 +59,19 @@ def generate_qc_pages(qc_dir): ) raise OSError(msg) from os_error - files = [] - for root, _, fs in os.walk(qc_dir): - root = root[len(qc_dir) + 1 :] - files += [os.path.join(root, f) for f in fs] + _files = [] + for _root, _, fs in os.walk(qc_dir): + root = _root[len(qc_dir) + 1 :] + _files += [os.path.join(root, f) for f in fs] - with open(p.resource_filename("CPAC.qc", "data/index.html"), "rb") as f: - qc_content = f.read() - qc_content = qc_content.replace( - b"/*CPAC*/``/*CPAC*/", ("`" + "\n".join(files) + "`").encode() - ) - with open(os.path.join(qc_dir, "index.html"), "wb") as f: - f.write(qc_content) + with as_file(files("CPAC").joinpath("qc/data/index.html")) as _f: + with _f.open("rb") as f: + qc_content = f.read() + qc_content = qc_content.replace( + b"/*CPAC*/``/*CPAC*/", ("`" + "\n".join(_files) + "`").encode() + ) + with open(os.path.join(qc_dir, "index.html"), "wb") as f2: + f2.write(qc_content) def cal_snr_val(measure_file): @@ -229,8 +230,8 @@ def gen_carpet_plt(gm_mask, wm_mask, csf_mask, functional_to_standard, output): def gen_motion_plt(motion_parameters): - """ - Function to Generate Matplotlib plot for motion. + """Generate Matplotlib plot for motion. + Separate plots for Translation and Rotation are generated. Parameters @@ -276,7 +277,7 @@ def gen_motion_plt(motion_parameters): def gen_histogram(measure_file, measure): - """Generates Histogram Image of intensities for a given input nifti file. + """Generate Histogram Image of intensities for a given input nifti file. Parameters ---------- @@ -355,9 +356,7 @@ def gen_histogram(measure_file, measure): def make_histogram(measure_file, measure): - """ - Generates Histogram Image of intensities for a given input - nifti file. + """Generate Histogram Image of intensities for a given input nifti file. Parameters ---------- @@ -416,9 +415,7 @@ def make_histogram(measure_file, measure): def drop_percent(measure_file, percent): - """ - Zeros out voxels in measure files whose intensity doesnt fall in percent - of voxel intensities. + """Zero out voxels in whose intensity doesn't fall in percent of voxel intensities. Parameters ---------- @@ -459,9 +456,7 @@ def drop_percent(measure_file, percent): def get_spacing(across, down, dimension): - """ - Get Spacing in slices to be selected for montage - display varying in given dimension. + """Get spacing in slices for montage display varying in given dimension. Parameters ---------- @@ -493,9 +488,9 @@ def get_spacing(across, down, dimension): def determine_start_and_end(data, direction, percent): - """ - Determine start slice and end slice in data file in - given direction with at least threshold percent of voxels + """Determine start slice and end slice in data file... + + ...in given direction with at least threshold percent of voxels at start and end slices. Parameters @@ -569,7 +564,7 @@ def determine_start_and_end(data, direction, percent): return start, end -def _log_graphing_error(which_montagee: str, image_name: str, error: Exception): +def _log_graphing_error(which_montage: str, image_name: str, error: Exception): IFLOGGER.error( "\n[!] QC Interface: Had a problem with creating the %s montage for %s" "\n\nDetails:%s. This error might occur because of a registration error" @@ -582,8 +577,9 @@ def _log_graphing_error(which_montagee: str, image_name: str, error: Exception): def montage_axial(overlay, underlay, png_name, cbar_name): - """Draws Montage using overlay on Anatomical brain in Axial Direction, - calls make_montage_axial. 
+ """Draw montage using overlay on anatomical brain in axial direction. + + calls :py:func:`make_montage_axial`. Parameters ---------- @@ -775,9 +771,9 @@ def make_montage_axial(overlay, underlay, png_name, cbar_name): def montage_sagittal(overlay, underlay, png_name, cbar_name): - """ - Draws Montage using overlay on Anatomical brain in Sagittal Direction - calls make_montage_sagittal. + """Draw montage using overlay on anatomical brain in sagittal direction. + + calls :py:func:`make_montage_sagittal`. Parameters ---------- @@ -1243,8 +1239,7 @@ def montage_gm_wm_csf_sagittal(overlay_csf, overlay_wm, overlay_gm, underlay, pn def register_pallete(colors_file, cbar_name): - """ - Registers color pallete to matplotlib. + """Register color pallete to matplotlib. Parameters ---------- @@ -1270,8 +1265,7 @@ def register_pallete(colors_file, cbar_name): def resample_1mm(file_): - """ - Calls make_resample_1mm which resamples file to 1mm space. + """Call make_resample_1mm which resamples file to 1mm space. Parameters ---------- @@ -1362,13 +1356,13 @@ def dc(input1, input2): ----- This is a real metric. """ - input1 = numpy.atleast_1d(input1.astype(bool)) - input2 = numpy.atleast_1d(input2.astype(bool)) + input1 = np.atleast_1d(input1.astype(bool)) + input2 = np.atleast_1d(input2.astype(bool)) - intersection = numpy.count_nonzero(input1 & input2) + intersection = np.count_nonzero(input1 & input2) - size_i1 = numpy.count_nonzero(input1) - size_i2 = numpy.count_nonzero(input2) + size_i1 = np.count_nonzero(input1) + size_i2 = np.count_nonzero(input2) try: dc = 2.0 * intersection / float(size_i1 + size_i2) @@ -1403,22 +1397,19 @@ def jc(input1, input2): ----- This is a real metric. """ - input1 = numpy.atleast_1d(input1.astype(bool)) - input2 = numpy.atleast_1d(input2.astype(bool)) + input1 = np.atleast_1d(input1.astype(bool)) + input2 = np.atleast_1d(input2.astype(bool)) - intersection = numpy.count_nonzero(input1 & input2) - union = numpy.count_nonzero(input1 | input2) + intersection = np.count_nonzero(input1 & input2) + union = np.count_nonzero(input1 | input2) return float(intersection) / float(union) def crosscorr(input1, input2): - """ - cross correlation - computer compute cross correction bewteen input mask. - """ - input1 = numpy.atleast_1d(input1.astype(bool)) - input2 = numpy.atleast_1d(input2.astype(bool)) + """Compute cross correction bewteen input masks.""" + input1 = np.atleast_1d(input1.astype(bool)) + input2 = np.atleast_1d(input2.astype(bool)) from scipy.stats.stats import pearsonr @@ -1427,12 +1418,12 @@ def crosscorr(input1, input2): def coverage(input1, input2): """Estimate the coverage between two mask.""" - input1 = numpy.atleast_1d(input1.astype(bool)) - input2 = numpy.atleast_1d(input2.astype(bool)) + input1 = np.atleast_1d(input1.astype(bool)) + input2 = np.atleast_1d(input2.astype(bool)) - intsec = numpy.count_nonzero(input1 & input2) - if numpy.sum(input1) > numpy.sum(input2): - smallv = numpy.sum(input2) + intsec = np.count_nonzero(input1 & input2) + if np.sum(input1) > np.sum(input2): + smallv = np.sum(input2) else: - smallv = numpy.sum(input1) + smallv = np.sum(input1) return float(intsec) / float(smallv) diff --git a/CPAC/surface/tests/test_config.py b/CPAC/surface/tests/test_config.py index 046ea8fb55..f1e3897dfd 100644 --- a/CPAC/surface/tests/test_config.py +++ b/CPAC/surface/tests/test_config.py @@ -1,8 +1,24 @@ +# Copyright (C) 2022-2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Tests for surface configuration.""" +from importlib.resources import as_file, files import os -import pkg_resources as p import pytest import yaml @@ -15,16 +31,14 @@ def test_duplicate_freesurfer(tmp_path): """The pipeline should build fast if freesurfer is not self-duplicating.""" config = Configuration(yaml.safe_load("FROM: abcd-options")) - with open( - p.resource_filename( - "CPAC", - os.path.join("resources", "configs", "data_config_S3-BIDS-ABIDE.yml"), - ), - "r", - ) as data_config: - sub_dict = yaml.safe_load(data_config)[0] + with as_file( + files("CPAC").joinpath("resources/configs/data_config_S3-BIDS-ABIDE.yml") + ) as _f: + with _f.open("r") as data_config: + sub_dict = yaml.safe_load(data_config)[0] for directory in ["output", "working", "log", "crash_log"]: directory_key = ["pipeline_setup", f"{directory}_directory", "path"] + assert isinstance(config[directory_key], str) config[directory_key] = os.path.join( tmp_path, config[directory_key].lstrip("/") ) diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py index e17cf2ed30..6346b18e32 100644 --- a/CPAC/utils/build_data_config.py +++ b/CPAC/utils/build_data_config.py @@ -1823,23 +1823,20 @@ def get_nonBIDS_data( def util_copy_template(template_type=None): """Copy the data settings YAML file template to the current directory.""" + from importlib.resources import as_file, files import os import shutil - import pkg_resources as p - from CPAC.utils.configuration import preconfig_yaml template_type = "data_settings" if not template_type else template_type - settings_template = ( - preconfig_yaml("default") - if (template_type == "pipeline_config") - else p.resource_filename( - "CPAC", - os.path.join("resources", "configs", f"{template_type}_template.yml"), + with as_file(files("CPAC").joinpath("resources/configs")) as configs: + settings_template = ( + preconfig_yaml("default") + if (template_type == "pipeline_config") + else str(configs / f"{template_type}_template.yml") ) - ) settings_file = os.path.join(os.getcwd(), f"{template_type}.yml") diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index c4542f579d..e15d76c6af 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2024 C-PAC Developers +# Copyright (C) 2022-2025 C-PAC Developers # This file is part of C-PAC. @@ -16,13 +16,13 @@ # License along with C-PAC. If not, see . 
"""C-PAC Configuration class and related functions.""" +from importlib.resources import as_file, files import os import re from typing import Optional from warnings import warn from click import BadParameter -import pkg_resources as p import yaml from .diff import dct_diff @@ -737,10 +737,12 @@ def preconfig_yaml(preconfig_name="default", load=False): if load: with open(preconfig_yaml(preconfig_name), "r", encoding="utf-8") as _f: return yaml.safe_load(_f) - return p.resource_filename( - "CPAC", - os.path.join("resources", "configs", f"pipeline_config_{preconfig_name}.yml"), - ) + with as_file( + files("CPAC").joinpath( + f"resources/configs/pipeline_config_{preconfig_name}.yml" + ) + ) as _f: + return str(_f) class Preconfiguration(Configuration): diff --git a/CPAC/utils/symlinks.py b/CPAC/utils/symlinks.py index c9283394de..3494243e4a 100644 --- a/CPAC/utils/symlinks.py +++ b/CPAC/utils/symlinks.py @@ -1,3 +1,21 @@ +# Copyright (C) 2019-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Create symbolic links.""" + from collections import defaultdict import errno import os @@ -5,6 +23,7 @@ output_renamings = { "anatomical_brain": "anat", "anatomical_brain_mask": "anat", + "anatomical_reorient": "anat", "qc": "qc", "anatomical_skull_leaf": "anat", "anatomical_to_mni_linear_xfm": "anat", diff --git a/CPAC/utils/tests/test_symlinks.py b/CPAC/utils/tests/test_symlinks.py index a2ddca02c9..d271ea752d 100644 --- a/CPAC/utils/tests/test_symlinks.py +++ b/CPAC/utils/tests/test_symlinks.py @@ -1,4 +1,4 @@ -# Copyright (C) 2019-2024 C-PAC Developers +# Copyright (C) 2019-2025 C-PAC Developers # This file is part of C-PAC. @@ -14,35 +14,30 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-import os -import tempfile +"""Test symbolic links.""" -import pkg_resources as p +from importlib.resources import as_file, files +import tempfile from CPAC.utils.monitoring.custom_logging import getLogger from CPAC.utils.symlinks import create_symlinks logger = getLogger("CPAC.utils.tests") -mocked_outputs = p.resource_filename( - "CPAC", os.path.join("utils", "tests", "test_symlinks-outputs.txt") -) - def test_symlinks(): temp_dir = tempfile.mkdtemp(suffix="test_symlinks") - paths = [] - with open(mocked_outputs, "r") as f: - for _path in f.readlines(): - path = _path - path = path.strip() - if path: - paths += [path] - - create_symlinks( - temp_dir, "sym_links", "pipeline_benchmark-FNIRT", "1019436_1", paths - ) + paths: list[str] = [] + with as_file(files("CPAC").joinpath("utils/tests/test_symlinks-outputs.txt")) as _f: + with _f.open("r") as f: + for _path in f.readlines(): + path = _path + path = path.strip() + if path: + paths += [path] + + create_symlinks(temp_dir, "pipeline_benchmark-FNIRT", "1019436_1", paths) logger.info("Links created at %s", temp_dir) diff --git a/CPAC/utils/tests/test_trimmer.py b/CPAC/utils/tests/test_trimmer.py index 1d1f7361f7..265919f7ed 100644 --- a/CPAC/utils/tests/test_trimmer.py +++ b/CPAC/utils/tests/test_trimmer.py @@ -1,3 +1,21 @@ +# Copyright (C) 2020-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Test The Trimmer.""" + from copy import copy import tempfile @@ -11,10 +29,9 @@ def accept_all(object, name, value): @pytest.mark.skip(reason="needs refactored") def test_trimmer(): + from importlib.resources import as_file, files import os - import pkg_resources as p - from CPAC.pipeline.cpac_pipeline import build_workflow from CPAC.utils.configuration import Configuration from CPAC.utils.trimmer import ( @@ -24,13 +41,9 @@ def test_trimmer(): the_trimmer, ) - pipe_config = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "pipeline_config_template.yml") - ) - - data_config = p.resource_filename( - "CPAC", os.path.join("resources", "configs", "data_config_S3-BIDS-ABIDE.yml") - ) + with as_file(files("CPAC").joinpath("resources/configs")) as configs: + pipe_config = configs / "pipeline_config_template.yml" + data_config = configs / "data_config_S3-BIDS-ABIDE.yml" data_config = yaml.safe_load(open(data_config, "r")) sub_dict = data_config[0] From 622861f1ac3d96e2173c7b2068dc79aa868a85f6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 17:07:38 -0400 Subject: [PATCH 408/507] :memo: Add `importlib.resources` upgrade to CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3a965b2c3..07974aa187 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,6 +54,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed broken support for native-space masking. 
- Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods. - Moved `Anatomical_Resampled` masking method from `func_masking` to the `template_space_func_masking`. + - Upgraded resource retrieval to `importlib.resources`. ### Upgraded From bdc0a6350f58ad7812f8a4401520062c5f54adc3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 17:21:21 -0400 Subject: [PATCH 409/507] :recycle: DRY CONFIGS_PATH --- CPAC/__main__.py | 14 ++++++-------- CPAC/_entrypoints/run.py | 8 ++------ CPAC/pipeline/__init__.py | 14 ++++++-------- CPAC/pipeline/cpac_runner.py | 7 ++----- CPAC/pipeline/test/test_cpac_runner.py | 8 +++----- CPAC/resources/configs/__init__.py | 6 ++++++ CPAC/surface/tests/test_config.py | 20 ++++++++------------ CPAC/utils/build_data_config.py | 13 ++++++------- CPAC/utils/configuration/configuration.py | 9 ++------- CPAC/utils/tests/test_trimmer.py | 12 ++++++------ 10 files changed, 47 insertions(+), 64 deletions(-) diff --git a/CPAC/__main__.py b/CPAC/__main__.py index a33598ce8a..535adf83fa 100644 --- a/CPAC/__main__.py +++ b/CPAC/__main__.py @@ -17,12 +17,12 @@ # License along with C-PAC. If not, see . """C-PAC CLI.""" -from importlib.resources import as_file, files import os import click from click_aliases import ClickAliasedGroup +from CPAC.resources.configs import CONFIGS_PATH from CPAC.utils.docs import version_report from CPAC.utils.monitoring.custom_logging import getLogger @@ -73,8 +73,7 @@ def version(): def _config_path(filename: str) -> str: """Given a base filename, return full config path.""" - with as_file(files("CPAC").joinpath("resources/configs")) as configs: - return str(configs / f"{filename}") + return str(CONFIGS_PATH / f"{filename}") @main.command() @@ -541,11 +540,10 @@ def test(): def run_suite(show_list: bool | str = False, pipeline_filter=""): from CPAC.pipeline import cpac_runner - with as_file(files("CPAC").joinpath("resources/configs")) as configs: - test_config_dir = configs / "test_configs" - data_test = test_config_dir / "data-test_S3-ADHD200_1" - data_test_no_scan_param = test_config_dir / "data-test_S3-ADHD200_no-params" - data_test_fmap = test_config_dir / "data-test_S3-NKI-RS_fmap" + test_config_dir = CONFIGS_PATH / "test_configs" + data_test = test_config_dir / "data-test_S3-ADHD200_1" + data_test_no_scan_param = test_config_dir / "data-test_S3-ADHD200_no-params" + data_test_fmap = test_config_dir / "data-test_S3-NKI-RS_fmap" if show_list: show_list = "\nAvailables pipelines:" diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index f0a83a4f3e..1721756719 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -482,14 +482,10 @@ def run_main(): elif args.analysis_level == "group": if not args.group_file or not os.path.exists(args.group_file): - from importlib.resources import as_file, files + from CPAC.resources.configs import CONFIGS_PATH WFLOGGER.warning("\nNo group analysis configuration file was supplied.\n") - with as_file( - files("CPAC").joinpath("resources/configs/group_config_template.yml") - ) as _f: - args.group_file = str(_f) - + args.group_file = str(CONFIGS_PATH / "group_config_template.yml") output_group = os.path.join(output_dir, "group_config.yml") try: diff --git a/CPAC/pipeline/__init__.py b/CPAC/pipeline/__init__.py index 537602abc3..1e28692e73 100644 --- a/CPAC/pipeline/__init__.py +++ b/CPAC/pipeline/__init__.py @@ -16,18 +16,16 @@ # License along with C-PAC. If not, see . 
"""The C-PAC pipeline and its underlying infrastructure.""" -from importlib.resources import as_file, files - from CPAC.pipeline.nipype_pipeline_engine.monkeypatch import patch_base_interface +from CPAC.resources.configs import CONFIGS_PATH patch_base_interface() # Monkeypatch Nipypes BaseInterface class -with as_file(files("CPAC").joinpath("resources/configs")) as _f: - ALL_PIPELINE_CONFIGS = [ - x.split("_")[2].replace(".yml", "") - for x in [str(_) for _ in _f.iterdir()] - if "pipeline_config" in x - ] +ALL_PIPELINE_CONFIGS = [ + x.split("_")[2].replace(".yml", "") + for x in [str(_) for _ in CONFIGS_PATH.iterdir()] + if "pipeline_config" in x +] ALL_PIPELINE_CONFIGS.sort() AVAILABLE_PIPELINE_CONFIGS = [ preconfig diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 8c2b37b0f4..bed2ca6299 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -288,12 +288,9 @@ def run( # noqa: PLR0915 plugin_args = {"status_callback": log_nodes_cb} if not config_file: - from importlib.resources import as_file, files + from CPAC.resources.configs import CONFIGS_PATH - with as_file( - files("CPAC").joinpath("resources/configs/pipeline_config_template.yml") - ) as _f: - config_file = str(_f) + config_file = str(CONFIGS_PATH / "pipeline_config_template.yml") # Init variables sublist = None diff --git a/CPAC/pipeline/test/test_cpac_runner.py b/CPAC/pipeline/test/test_cpac_runner.py index c41d798389..575cfc970a 100644 --- a/CPAC/pipeline/test/test_cpac_runner.py +++ b/CPAC/pipeline/test/test_cpac_runner.py @@ -16,7 +16,6 @@ # License along with C-PAC. If not, see . """Run C-PAC in a container.""" -from importlib.resources import as_file, files import os from pathlib import Path @@ -25,6 +24,7 @@ from CPAC.pipeline.cpac_pipeline import load_cpac_pipe_config from CPAC.pipeline.cpac_runner import run_T1w_longitudinal from CPAC.pipeline.utils import get_shell +from CPAC.resources.configs import CONFIGS_PATH from CPAC.utils.bids_utils import create_cpac_data_config @@ -53,7 +53,5 @@ def test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id): bids_dir = "/Users/steven.giavasis/data/neurodata_hnu" test_dir = "/test_dir" part_id = "0025427" - with as_file( - files("CPAC").joinpath("resources/configs/pipeline_config_default.yml") - ) as cfg: - test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id) + cfg = str(CONFIGS_PATH / "pipeline_config_default.yml") + test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id) diff --git a/CPAC/resources/configs/__init__.py b/CPAC/resources/configs/__init__.py index c312d7ab80..ac7754d63c 100644 --- a/CPAC/resources/configs/__init__.py +++ b/CPAC/resources/configs/__init__.py @@ -15,3 +15,9 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Configurations for C-PAC.""" + +from importlib.resources import as_file, files + +with as_file(files("CPAC").joinpath("resources/configs")) as _configs: + CONFIGS_PATH = _configs + """Path to pre-built C-PAC configurations.""" diff --git a/CPAC/surface/tests/test_config.py b/CPAC/surface/tests/test_config.py index f1e3897dfd..97d248ed3e 100644 --- a/CPAC/surface/tests/test_config.py +++ b/CPAC/surface/tests/test_config.py @@ -16,30 +16,26 @@ # License along with C-PAC. If not, see . 
"""Tests for surface configuration.""" -from importlib.resources import as_file, files -import os +from pathlib import Path +from typing import cast import pytest import yaml from CPAC.pipeline.cpac_pipeline import run_workflow +from CPAC.resources.configs import CONFIGS_PATH from CPAC.utils.configuration import Configuration @pytest.mark.skip(reason="timing out for unrelated reasons") @pytest.mark.timeout(60) -def test_duplicate_freesurfer(tmp_path): +def test_duplicate_freesurfer(tmp_path: Path) -> None: """The pipeline should build fast if freesurfer is not self-duplicating.""" config = Configuration(yaml.safe_load("FROM: abcd-options")) - with as_file( - files("CPAC").joinpath("resources/configs/data_config_S3-BIDS-ABIDE.yml") - ) as _f: - with _f.open("r") as data_config: - sub_dict = yaml.safe_load(data_config)[0] + with (CONFIGS_PATH / "data_config_S3-BIDS-ABIDE.yml").open("r") as data_config: + sub_dict = yaml.safe_load(data_config)[0] for directory in ["output", "working", "log", "crash_log"]: directory_key = ["pipeline_setup", f"{directory}_directory", "path"] - assert isinstance(config[directory_key], str) - config[directory_key] = os.path.join( - tmp_path, config[directory_key].lstrip("/") - ) + item = cast(str, config[directory_key]) + config[directory_key] = str(tmp_path / item.lstrip("/")) run_workflow(sub_dict, config, False, test_config=True) diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py index 6346b18e32..6d1e2d9f0f 100644 --- a/CPAC/utils/build_data_config.py +++ b/CPAC/utils/build_data_config.py @@ -1823,20 +1823,19 @@ def get_nonBIDS_data( def util_copy_template(template_type=None): """Copy the data settings YAML file template to the current directory.""" - from importlib.resources import as_file, files import os import shutil + from CPAC.resources.configs import CONFIGS_PATH from CPAC.utils.configuration import preconfig_yaml template_type = "data_settings" if not template_type else template_type - with as_file(files("CPAC").joinpath("resources/configs")) as configs: - settings_template = ( - preconfig_yaml("default") - if (template_type == "pipeline_config") - else str(configs / f"{template_type}_template.yml") - ) + settings_template = ( + preconfig_yaml("default") + if (template_type == "pipeline_config") + else str(CONFIGS_PATH / f"{template_type}_template.yml") + ) settings_file = os.path.join(os.getcwd(), f"{template_type}.yml") diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index e15d76c6af..682bd44ee7 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -16,7 +16,6 @@ # License along with C-PAC. If not, see . 
"""C-PAC Configuration class and related functions.""" -from importlib.resources import as_file, files import os import re from typing import Optional @@ -25,6 +24,7 @@ from click import BadParameter import yaml +from CPAC.resources.configs import CONFIGS_PATH from .diff import dct_diff CONFIG_KEY_TYPE = str | list[str] @@ -737,12 +737,7 @@ def preconfig_yaml(preconfig_name="default", load=False): if load: with open(preconfig_yaml(preconfig_name), "r", encoding="utf-8") as _f: return yaml.safe_load(_f) - with as_file( - files("CPAC").joinpath( - f"resources/configs/pipeline_config_{preconfig_name}.yml" - ) - ) as _f: - return str(_f) + return str(CONFIGS_PATH / f"/pipeline_config_{preconfig_name}.yml") class Preconfiguration(Configuration): diff --git a/CPAC/utils/tests/test_trimmer.py b/CPAC/utils/tests/test_trimmer.py index 265919f7ed..60e2ceff2f 100644 --- a/CPAC/utils/tests/test_trimmer.py +++ b/CPAC/utils/tests/test_trimmer.py @@ -29,10 +29,11 @@ def accept_all(object, name, value): @pytest.mark.skip(reason="needs refactored") def test_trimmer(): - from importlib.resources import as_file, files + """Test The Trimmer.""" import os from CPAC.pipeline.cpac_pipeline import build_workflow + from CPAC.resources.configs import CONFIGS_PATH from CPAC.utils.configuration import Configuration from CPAC.utils.trimmer import ( compute_datasink_dirs, @@ -41,14 +42,13 @@ def test_trimmer(): the_trimmer, ) - with as_file(files("CPAC").joinpath("resources/configs")) as configs: - pipe_config = configs / "pipeline_config_template.yml" - data_config = configs / "data_config_S3-BIDS-ABIDE.yml" + pipe_config = CONFIGS_PATH / "pipeline_config_template.yml" + data_config = CONFIGS_PATH / "data_config_S3-BIDS-ABIDE.yml" - data_config = yaml.safe_load(open(data_config, "r")) + data_config = yaml.safe_load(data_config.open("r")) sub_dict = data_config[0] - c = Configuration(yaml.safe_load(open(pipe_config, "r"))) + c = Configuration(yaml.safe_load(pipe_config.open("r"))) temp_dir = tempfile.mkdtemp() c.logDirectory = temp_dir c.workingDirectory = temp_dir From 6e0fbfc49377f9469cfb311b20c2c31c1a43a7b0 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 20:23:37 -0400 Subject: [PATCH 410/507] :pencil2: Remove extra `/` --- CPAC/utils/configuration/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index 682bd44ee7..0d94752487 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -737,7 +737,7 @@ def preconfig_yaml(preconfig_name="default", load=False): if load: with open(preconfig_yaml(preconfig_name), "r", encoding="utf-8") as _f: return yaml.safe_load(_f) - return str(CONFIGS_PATH / f"/pipeline_config_{preconfig_name}.yml") + return str(CONFIGS_PATH / f"pipeline_config_{preconfig_name}.yml") class Preconfiguration(Configuration): From 0d0149abac597bc5e760cf72af5f215ced434d03 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 25 Jun 2025 20:43:49 -0400 Subject: [PATCH 411/507] fixup! 
:necktie: Include `desc-reorient_bold` in nuisiance generation --- CPAC/nuisance/nuisance.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 4dbc4cf3cc..3404cf7490 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2456,7 +2456,8 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op option_val="USER-DEFINED", inputs=[ ( - ["desc-preproc_bold", "desc-reorient_bold"], + "desc-preproc_bold", + "desc-reorient_bold", [ "space-bold_desc-brain_mask", "space-template_desc-bold_mask", From 0ec820682762a3a876a07783397834d18ae3bb1b Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 26 Jun 2025 11:30:14 -0400 Subject: [PATCH 412/507] :bug: Use sbref for reference --- CPAC/nuisance/nuisance.py | 1 + CPAC/nuisance/utils/xfm.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 3404cf7490..9504f0a3b7 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2458,6 +2458,7 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op ( "desc-preproc_bold", "desc-reorient_bold", + "sbref", [ "space-bold_desc-brain_mask", "space-template_desc-bold_mask", diff --git a/CPAC/nuisance/utils/xfm.py b/CPAC/nuisance/utils/xfm.py index 60c6a2c133..4ddb289e20 100644 --- a/CPAC/nuisance/utils/xfm.py +++ b/CPAC/nuisance/utils/xfm.py @@ -55,7 +55,7 @@ def transform_bold_mask_to_native( f"{'ANTs' if reg_tool == 'ants' else 'FNIRT'}_pipelines", "interpolation", ] - bold = strat_pool.node_data("desc-preproc_bold") + sbref = strat_pool.node_data("sbref") bold_mask = strat_pool.node_data( ["space-template_desc-bold_mask", "space-template_desc-brain_mask"] ) @@ -63,7 +63,7 @@ def transform_bold_mask_to_native( wf.connect( [ (bold_mask.node, apply_xfm, [(bold_mask.out, "inputspec.input_image")]), - (bold.node, apply_xfm, [(bold.out, "inputspec.reference")]), + (sbref.node, apply_xfm, [(sbref.out, "inputspec.reference")]), (xfm.node, apply_xfm, [(xfm.out, "inputspec.transform")]), ] ) From 5c02948a14b0a921d3fab930bc54cff4cc9c56cc Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 26 Jun 2025 14:16:01 -0400 Subject: [PATCH 413/507] :recycle: Make more robust against timezone awareness differences --- CPAC/utils/monitoring/draw_gantt_chart.py | 47 ++++++++++++----------- CPAC/utils/monitoring/monitoring.py | 11 ++++++ CPAC/utils/tests/test_utils.py | 16 +++++++- 3 files changed, 49 insertions(+), 25 deletions(-) diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py index d1fe4517c5..2fc6dce651 100644 --- a/CPAC/utils/monitoring/draw_gantt_chart.py +++ b/CPAC/utils/monitoring/draw_gantt_chart.py @@ -45,13 +45,13 @@ """ from collections import OrderedDict -from datetime import datetime +from datetime import datetime, timedelta import random from warnings import warn from nipype.utils.draw_gantt_chart import draw_lines, draw_resource_bar, log_to_dict -from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone +from CPAC.utils.monitoring.monitoring import _NoTime, DatetimeWithSafeNone def create_event_dict(start_time, nodes_list): @@ -403,37 +403,38 @@ def generate_gantt_chart( for node in nodes_list: if "duration" not in node and (node["start"] and node["finish"]): - node["duration"] = (node["finish"] - node["start"]).total_seconds() + _duration = node["finish"] - node["start"] + assert isinstance(_duration, timedelta) + 
node["duration"] = _duration.total_seconds() # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] + start = DatetimeWithSafeNone(start_node["start"]) + finish = DatetimeWithSafeNone(last_node["finish"]) + if isinstance(start, _NoTime) or isinstance(finish, _NoTime): + return + start, finish = DatetimeWithSafeNone.sync_tz(start, finish) try: - duration = (last_node["finish"] - start_node["start"]).total_seconds() + duration = (finish - start).total_seconds() except TypeError: # no duration return # Get events based dictionary of node run stats - events = create_event_dict(start_node["start"], nodes_list) + events = create_event_dict(start, nodes_list) # Summary strings of workflow at top - html_string += ( - "

Start: " + start_node["start"].strftime("%Y-%m-%d %H:%M:%S") + "

" - ) - html_string += ( - "

Finish: " + last_node["finish"].strftime("%Y-%m-%d %H:%M:%S") + "

" - ) + html_string += "

Start: " + start.strftime("%Y-%m-%d %H:%M:%S") + "

" + html_string += "

Finish: " + finish.strftime("%Y-%m-%d %H:%M:%S") + "

" html_string += "

Duration: " + f"{duration / 60:.2f}" + " minutes

" html_string += "

Nodes: " + str(len(nodes_list)) + "

" html_string += "

Cores: " + str(cores) + "

" html_string += close_header # Draw nipype nodes Gantt chart and runtimes - html_string += draw_lines( - start_node["start"], duration, minute_scale, space_between_minutes - ) + html_string += draw_lines(start, duration, minute_scale, space_between_minutes) html_string += draw_nodes( - start_node["start"], + start, nodes_list, cores, minute_scale, @@ -447,8 +448,8 @@ def generate_gantt_chart( # Plot gantt chart resource_offset = 120 + 30 * cores html_string += draw_resource_bar( - start_node["start"], - last_node["finish"], + start, + finish, estimated_mem_ts, space_between_minutes, minute_scale, @@ -457,8 +458,8 @@ def generate_gantt_chart( "Memory", ) html_string += draw_resource_bar( - start_node["start"], - last_node["finish"], + start, + finish, runtime_mem_ts, space_between_minutes, minute_scale, @@ -472,8 +473,8 @@ def generate_gantt_chart( runtime_threads_ts = calculate_resource_timeseries(events, "runtime_threads") # Plot gantt chart html_string += draw_resource_bar( - start_node["start"], - last_node["finish"], + start, + finish, estimated_threads_ts, space_between_minutes, minute_scale, @@ -482,8 +483,8 @@ def generate_gantt_chart( "Threads", ) html_string += draw_resource_bar( - start_node["start"], - last_node["finish"], + start, + finish, runtime_threads_ts, space_between_minutes, minute_scale, diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py index 9a6ce3c2fa..8d715b82b8 100644 --- a/CPAC/utils/monitoring/monitoring.py +++ b/CPAC/utils/monitoring/monitoring.py @@ -252,6 +252,17 @@ def __str__(self) -> str: """Return the string representation of the datetime or NoTime.""" return super().__str__() + @staticmethod + def sync_tz( + one: "DatetimeWithSafeNone", two: "DatetimeWithSafeNone" + ) -> tuple[datetime, datetime]: + """Add timezone to other if one datetime is aware and other isn't .""" + if one.tzinfo is None and two.tzinfo is not None: + return one.replace(tzinfo=two.tzinfo), two + if one.tzinfo is not None and two.tzinfo is None: + return one, two.replace(tzinfo=one.tzinfo) + return one, two + class DatetimeJSONEncoder(json.JSONEncoder): """JSON encoder that handles DatetimeWithSafeNone instances.""" diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index f8c74f7470..50f37cd20e 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -204,10 +204,22 @@ def check_expected_keys( @pytest.mark.parametrize( - "t1", [datetime.now(), datetime.isoformat(datetime.now()), None] + "t1", + [ + datetime.now(), + datetime.now().astimezone(), + datetime.isoformat(datetime.now()), + None, + ], ) @pytest.mark.parametrize( - "t2", [datetime.now(), datetime.isoformat(datetime.now()), None] + "t2", + [ + datetime.now(), + datetime.now().astimezone(), + datetime.isoformat(datetime.now()), + None, + ], ) def test_datetime_with_safe_none(t1: OptionalDatetime, t2: OptionalDatetime): """Test DatetimeWithSafeNone class works with datetime and None.""" From 2d82a0bb7352a5ca482896415cb6e16606c381eb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 26 Jun 2025 14:53:35 -0400 Subject: [PATCH 414/507] :white_check_mark: Test `sync_tz` --- CPAC/utils/tests/test_utils.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py index 50f37cd20e..6c9d111048 100644 --- a/CPAC/utils/tests/test_utils.py +++ b/CPAC/utils/tests/test_utils.py @@ -223,9 +223,20 @@ def check_expected_keys( ) def test_datetime_with_safe_none(t1: OptionalDatetime, t2: 
OptionalDatetime): """Test DatetimeWithSafeNone class works with datetime and None.""" + originals = t1, t2 t1 = DatetimeWithSafeNone(t1) t2 = DatetimeWithSafeNone(t2) if t1 and t2: + _tzinfos = [getattr(_, "tzinfo", None) for _ in originals] + if ( + all(isinstance(_, datetime) for _ in originals) + and any(_tzinfos) + and not all(_tzinfos) + ): + with pytest.raises(TypeError): + originals[1] - originals[0] # type: ignore[reportOperatorIssue] + _t1, _t2 = DatetimeWithSafeNone.sync_tz(*originals) # type: ignore[reportArgumentType] + assert isinstance(_t2 - _t1, timedelta) assert isinstance(t2 - t1, timedelta) else: assert t2 - t1 == timedelta(0) From 6e501cc21aa50b6aa978172a95d40d5517c517f8 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 26 Jun 2025 15:09:22 -0400 Subject: [PATCH 415/507] :fire: Remove stray comment --- CPAC/nuisance/utils/xfm.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/CPAC/nuisance/utils/xfm.py b/CPAC/nuisance/utils/xfm.py index 4ddb289e20..3ee1b59421 100644 --- a/CPAC/nuisance/utils/xfm.py +++ b/CPAC/nuisance/utils/xfm.py @@ -24,8 +24,6 @@ from CPAC.registration.registration import apply_transform from CPAC.utils.configuration import Configuration -# ("from-template_to-bold_mode-image_xfm", "desc-preproc_bold"), - def transform_bold_mask_to_native( wf: Workflow, From 35547c468c78e1ff5e78a97637b774cfa63a6f54 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 7 Jul 2025 14:06:44 -0400 Subject: [PATCH 416/507] fixing ants.py for missing pkgr_fn --- CPAC/anat_preproc/ants.py | 23 ++++++----------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 3a3cb1988b..00e5fe5785 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -295,23 +295,12 @@ def init_brain_extraction_wf( # noqa: PLR0913 init_aff.inputs.search_grid = (40, (0, 40, 40)) # Set up spatial normalization - settings_file = ( - "antsBrainExtraction_%s.json" - if use_laplacian - else "antsBrainExtractionNoLaplacian_%s.json" - ) - with as_file( - files("CPAC").joinpath( - f"anat_preproc/data/{settings_file}{normalization_quality}" - ) - ) as _f: - norm = pe.Node( - Registration(from_file=str(_f)), - name="norm", - n_procs=omp_nthreads, - mem_gb=1.7, - mem_x=(1233286593342025 / 151115727451828646838272, "moving_image"), - ) + settings_file = (f'antsBrainExtraction_{normalization_quality}.json' if use_laplacian else f'antsBrainExtractionNoLaplacian_{normalization_quality}.json') + norm = pe.Node(Registration(from_file=as_file(files('CPAC.anat_preproc').joinpath('data').joinpath(settings_file))), + name='norm', + n_procs=omp_nthreads, + mem_gb=1.7, + mem_x=(1233286593342025 / 151115727451828646838272, 'moving_image')) norm.inputs.float = use_float fixed_mask_trait = "fixed_image_mask" if _ants_version and parseversion(_ants_version) >= Version("2.2.0"): From 9011536352e3104cc24e910ba19e95edfac30652 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 7 Jul 2025 14:22:50 -0400 Subject: [PATCH 417/507] changed schemable to schema --- CPAC/pipeline/schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 2785f6d8a6..b775d55099 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -55,7 +55,7 @@ Schema, Title, ) -from voluptuous.schema_builder import Schemable, UNDEFINED +from voluptuous.schema_builder import Schema, UNDEFINED from CPAC.utils.datatypes import 
ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX @@ -80,7 +80,7 @@ ORGANISMS: list[Organism] = ["human", "non-human primate", "rodent"] -def deprecated_option(option: Schemable, version: str, message: str) -> None: +def deprecated_option(option: Schema, version: str, message: str) -> None: """Mark an option as deprecated. Parameters @@ -119,7 +119,7 @@ class Deprecated(Optional): def __init__( self, - schema: Schemable, + schema: Schema, version: str, msg: str = "This option is deprecated and will be removed in a future release.", default: AnyType = UNDEFINED, From 8d4e08eebef8df932d8bfe8c9d3f326c9e44177a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 7 Jul 2025 14:51:08 -0400 Subject: [PATCH 418/507] str file-path instead of as_file --- CPAC/anat_preproc/ants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 00e5fe5785..03d2432adc 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -296,7 +296,7 @@ def init_brain_extraction_wf( # noqa: PLR0913 # Set up spatial normalization settings_file = (f'antsBrainExtraction_{normalization_quality}.json' if use_laplacian else f'antsBrainExtractionNoLaplacian_{normalization_quality}.json') - norm = pe.Node(Registration(from_file=as_file(files('CPAC.anat_preproc').joinpath('data').joinpath(settings_file))), + norm = pe.Node(Registration(from_file=str(files('CPAC.anat_preproc').joinpath('data').joinpath(settings_file))), name='norm', n_procs=omp_nthreads, mem_gb=1.7, From 587eadf2c815aed45942dedf0301865ea319ef39 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 7 Jul 2025 16:21:56 -0400 Subject: [PATCH 419/507] removing desc-head_T1w from brain_extraction --- CPAC/anat_preproc/anat_preproc.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 51f2b28107..2b51139505 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2187,7 +2187,6 @@ def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None outputs={ "desc-preproc_T1w": {"SkullStripped": "True"}, "desc-brain_T1w": {"SkullStripped": "True"}, - "desc-head_T1w": {"SkullStripped": "False"}, }, ) def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): @@ -2225,7 +2224,6 @@ def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { "desc-preproc_T1w": (anat_skullstrip_orig_vol, "out_file"), "desc-brain_T1w": (anat_skullstrip_orig_vol, "out_file"), - "desc-head_T1w": (node_T1w, out_T1w), } return (wf, outputs) From ce60bc75531e4cc33936f0241334bd7d3dd26698 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 7 Jul 2025 18:35:53 -0400 Subject: [PATCH 420/507] converting fs-fsl-tight/loose masks into generic ones --- CPAC/anat_preproc/anat_preproc.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 51f2b28107..ad8a466bc5 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2058,11 +2058,19 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-mask-ccs", "T1w-ACPC-template", ], - outputs=["space-T1w_desc-tight_brain_mask"], + outputs={ + "space-T1w_desc-brain_mask": { + "Description": "Brain mask extracted using FreeSurfer-BET-Tight method", + 
"Method": "FreeSurfer-BET-Tight", + "Threshold": "tight" + } + }, ) def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) + # Convert the tight brain mask to generic brain mask + outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-tight_brain_mask") return (wf, outputs) @@ -2107,11 +2115,19 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-mask-ccs", "T1w-ACPC-template", ], - outputs=["space-T1w_desc-loose_brain_mask"], + outputs={ + "space-T1w_desc-brain_mask": { + "Description": "Brain mask extracted using FreeSurfer-BET-Loose method", + "Method": "FreeSurfer-BET-Loose", + "Threshold": "loose" + } + }, ) def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) + # Convert the loose brain mask to generic brain mask + outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-loose_brain_mask") return (wf, outputs) From 737e557dd9526d0863bec909ed94361ecaeb4d05 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 8 Jul 2025 12:58:55 -0400 Subject: [PATCH 421/507] adding unit-tests --- CPAC/anat_preproc/tests/test_anat_preproc.py | 67 +++++++++++++++++++- 1 file changed, 66 insertions(+), 1 deletion(-) diff --git a/CPAC/anat_preproc/tests/test_anat_preproc.py b/CPAC/anat_preproc/tests/test_anat_preproc.py index 60bc42cead..e5145b6ca6 100755 --- a/CPAC/anat_preproc/tests/test_anat_preproc.py +++ b/CPAC/anat_preproc/tests/test_anat_preproc.py @@ -5,7 +5,8 @@ import nibabel as nib from .. import anat_preproc - +from unittest.mock import Mock, patch +from ..anat_preproc import brain_mask_freesurfer_fsl_loose, brain_mask_freesurfer_fsl_tight class TestAnatPreproc: def __init__(self): @@ -269,3 +270,67 @@ def test_anat_brain(self): # print 'correlation: ', correlation assert correlation[0, 1] >= 0.97 + + +@patch('CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector') +def test_brain_mask_freesurfer_fsl_loose(mock_connector): + """Test that brain_mask_freesurfer_fsl_loose correctly renames output key.""" + + mock_wf = Mock() + mock_cfg = Mock() + mock_strat_pool = Mock() + pipe_num = 1 + + mock_outputs = { + "space-T1w_desc-loose_brain_mask": "brain_mask_data", + "other_output": "other_data" + } + + mock_connector.return_value = (mock_wf, mock_outputs) + + result_wf, result_outputs = brain_mask_freesurfer_fsl_loose( + mock_wf, mock_cfg, mock_strat_pool, pipe_num + ) + + mock_connector.assert_called_once_with(mock_wf, mock_cfg, mock_strat_pool, pipe_num, None) + + # Assert workflow returned unchanged + assert result_wf == mock_wf + + # Assert output key was renamed correctly + assert "space-T1w_desc-brain_mask" in result_outputs + assert "space-T1w_desc-loose_brain_mask" not in result_outputs + assert result_outputs["space-T1w_desc-brain_mask"] == "brain_mask_data" + assert result_outputs["other_output"] == "other_data" + + +@patch('CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector') +def test_brain_mask_freesurfer_fsl_tight(mock_connector): + """Test that brain_mask_freesurfer_fsl_tight correctly renames output key.""" + + mock_wf = Mock() + mock_cfg = Mock() + mock_strat_pool = Mock() + pipe_num = 1 + + mock_outputs = { + "space-T1w_desc-tight_brain_mask": "brain_mask_data", + "other_output": "other_data" + } + + mock_connector.return_value = (mock_wf, mock_outputs) + + 
result_wf, result_outputs = brain_mask_freesurfer_fsl_tight( + mock_wf, mock_cfg, mock_strat_pool, pipe_num + ) + + mock_connector.assert_called_once_with(mock_wf, mock_cfg, mock_strat_pool, pipe_num, None) + + # Assert workflow returned unchanged + assert result_wf == mock_wf + + # Assert output key was renamed correctly + assert "space-T1w_desc-brain_mask" in result_outputs + assert "space-T1w_desc-tight_brain_mask" not in result_outputs + assert result_outputs["space-T1w_desc-brain_mask"] == "brain_mask_data" + assert result_outputs["other_output"] == "other_data" \ No newline at end of file From b43b69f1e7e41915011f28254b06f91c877c436b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 17:00:44 +0000 Subject: [PATCH 422/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/anat_preproc/anat_preproc.py | 12 +++-- CPAC/anat_preproc/tests/test_anat_preproc.py | 48 ++++++++++++-------- 2 files changed, 36 insertions(+), 24 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index ad8a466bc5..ba52b41e1f 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2062,7 +2062,7 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Tight method", "Method": "FreeSurfer-BET-Tight", - "Threshold": "tight" + "Threshold": "tight", } }, ) @@ -2070,7 +2070,9 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the tight brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-tight_brain_mask") + outputs["space-T1w_desc-brain_mask"] = outputs.pop( + "space-T1w_desc-tight_brain_mask" + ) return (wf, outputs) @@ -2119,7 +2121,7 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Loose method", "Method": "FreeSurfer-BET-Loose", - "Threshold": "loose" + "Threshold": "loose", } }, ) @@ -2127,7 +2129,9 @@ def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the loose brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-loose_brain_mask") + outputs["space-T1w_desc-brain_mask"] = outputs.pop( + "space-T1w_desc-loose_brain_mask" + ) return (wf, outputs) diff --git a/CPAC/anat_preproc/tests/test_anat_preproc.py b/CPAC/anat_preproc/tests/test_anat_preproc.py index e5145b6ca6..829a3acd77 100755 --- a/CPAC/anat_preproc/tests/test_anat_preproc.py +++ b/CPAC/anat_preproc/tests/test_anat_preproc.py @@ -6,7 +6,11 @@ from .. 
import anat_preproc from unittest.mock import Mock, patch -from ..anat_preproc import brain_mask_freesurfer_fsl_loose, brain_mask_freesurfer_fsl_tight +from ..anat_preproc import ( + brain_mask_freesurfer_fsl_loose, + brain_mask_freesurfer_fsl_tight, +) + class TestAnatPreproc: def __init__(self): @@ -272,10 +276,10 @@ def test_anat_brain(self): assert correlation[0, 1] >= 0.97 -@patch('CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector') +@patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") def test_brain_mask_freesurfer_fsl_loose(mock_connector): """Test that brain_mask_freesurfer_fsl_loose correctly renames output key.""" - + mock_wf = Mock() mock_cfg = Mock() mock_strat_pool = Mock() @@ -283,20 +287,22 @@ def test_brain_mask_freesurfer_fsl_loose(mock_connector): mock_outputs = { "space-T1w_desc-loose_brain_mask": "brain_mask_data", - "other_output": "other_data" + "other_output": "other_data", } - + mock_connector.return_value = (mock_wf, mock_outputs) - + result_wf, result_outputs = brain_mask_freesurfer_fsl_loose( mock_wf, mock_cfg, mock_strat_pool, pipe_num ) - - mock_connector.assert_called_once_with(mock_wf, mock_cfg, mock_strat_pool, pipe_num, None) - + + mock_connector.assert_called_once_with( + mock_wf, mock_cfg, mock_strat_pool, pipe_num, None + ) + # Assert workflow returned unchanged assert result_wf == mock_wf - + # Assert output key was renamed correctly assert "space-T1w_desc-brain_mask" in result_outputs assert "space-T1w_desc-loose_brain_mask" not in result_outputs @@ -304,10 +310,10 @@ def test_brain_mask_freesurfer_fsl_loose(mock_connector): assert result_outputs["other_output"] == "other_data" -@patch('CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector') +@patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") def test_brain_mask_freesurfer_fsl_tight(mock_connector): """Test that brain_mask_freesurfer_fsl_tight correctly renames output key.""" - + mock_wf = Mock() mock_cfg = Mock() mock_strat_pool = Mock() @@ -315,22 +321,24 @@ def test_brain_mask_freesurfer_fsl_tight(mock_connector): mock_outputs = { "space-T1w_desc-tight_brain_mask": "brain_mask_data", - "other_output": "other_data" + "other_output": "other_data", } - + mock_connector.return_value = (mock_wf, mock_outputs) - + result_wf, result_outputs = brain_mask_freesurfer_fsl_tight( mock_wf, mock_cfg, mock_strat_pool, pipe_num ) - - mock_connector.assert_called_once_with(mock_wf, mock_cfg, mock_strat_pool, pipe_num, None) - + + mock_connector.assert_called_once_with( + mock_wf, mock_cfg, mock_strat_pool, pipe_num, None + ) + # Assert workflow returned unchanged assert result_wf == mock_wf - + # Assert output key was renamed correctly assert "space-T1w_desc-brain_mask" in result_outputs assert "space-T1w_desc-tight_brain_mask" not in result_outputs assert result_outputs["space-T1w_desc-brain_mask"] == "brain_mask_data" - assert result_outputs["other_output"] == "other_data" \ No newline at end of file + assert result_outputs["other_output"] == "other_data" From 58b9028da177b813d4e233fc72ccde9fc59f55d4 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 8 Jul 2025 13:07:00 -0400 Subject: [PATCH 423/507] added to changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3a965b2c3..b9ec472a84 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Switch `sink_native_transforms` under 
`registration_workflows` to output all `.mat` files in ANTs and FSL Transforms. - `deoblique` field in pipeline config with `warp` and `refit` options to apply `3dWarp` or `3drefit` during data initialization. - `organism` configuration option. +- Functionality to convert `space-T1w_desc-loose_brain_mask` and `space-T1w_desc-tight_brain_mask` into generic brain mask `space-T1w_desc-brain_mask` to use in brain extraction nodeblock downstream. ### Changed From 95d7ca9b7546629b23737644296b93e06905c228 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 8 Jul 2025 18:25:35 -0400 Subject: [PATCH 424/507] removing the final brain mask to native node --- CPAC/anat_preproc/anat_preproc.py | 40 +++++++------------------------ 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index ba52b41e1f..df9e2ae098 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1303,7 +1303,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # fslmaths tmp_mask.nii.gz -mas ${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f"apply_mask_{node_id}") - wf.connect(skullstrip, "out_file", apply_mask, "in_file") + wf.connect(skullstrip, "mask_file", apply_mask, "in_file") node, out = strat_pool.get_data("T1w-brain-template-mask-ccs") wf.connect(node, out, apply_mask, "mask_file") @@ -1347,36 +1347,18 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file") - # CCS brain mask is in FS space, transfer it back to native T1 space - fs_fsl_brain_mask_to_native = pe.Node( - interface=freesurfer.ApplyVolTransform(), - name=f"fs_fsl_brain_mask_to_native_{node_id}", - ) - fs_fsl_brain_mask_to_native.inputs.reg_header = True - fs_fsl_brain_mask_to_native.inputs.interp = "nearest" - - wf.connect( - binarize_combined_mask, "out_file", fs_fsl_brain_mask_to_native, "source_file" - ) - - node, out = strat_pool.get_data("pipeline-fs_raw-average") - wf.connect(node, out, fs_fsl_brain_mask_to_native, "target_file") - - node, out = strat_pool.get_data("freesurfer-subject-dir") - wf.connect(node, out, fs_fsl_brain_mask_to_native, "subjects_dir") - if opt == "FreeSurfer-BET-Tight": outputs = { "space-T1w_desc-tight_brain_mask": ( - fs_fsl_brain_mask_to_native, - "transformed_file", + binarize_combined_mask, + "out_file", ) } elif opt == "FreeSurfer-BET-Loose": outputs = { "space-T1w_desc-loose_brain_mask": ( - fs_fsl_brain_mask_to_native, - "transformed_file", + binarize_combined_mask, + "out_file", ) } @@ -2062,7 +2044,7 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Tight method", "Method": "FreeSurfer-BET-Tight", - "Threshold": "tight", + "Threshold": "tight" } }, ) @@ -2070,9 +2052,7 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the tight brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop( - "space-T1w_desc-tight_brain_mask" - ) + outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-tight_brain_mask") return (wf, outputs) @@ -2121,7 +2101,7 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, 
opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Loose method", "Method": "FreeSurfer-BET-Loose", - "Threshold": "loose", + "Threshold": "loose" } }, ) @@ -2129,9 +2109,7 @@ def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the loose brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop( - "space-T1w_desc-loose_brain_mask" - ) + outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-loose_brain_mask") return (wf, outputs) From 266f7a4b7ae18eb91dd406686f4b668c0b25c5f2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 22:27:06 +0000 Subject: [PATCH 425/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/anat_preproc/anat_preproc.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index df9e2ae098..2d8037dee4 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2044,7 +2044,7 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Tight method", "Method": "FreeSurfer-BET-Tight", - "Threshold": "tight" + "Threshold": "tight", } }, ) @@ -2052,7 +2052,9 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the tight brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-tight_brain_mask") + outputs["space-T1w_desc-brain_mask"] = outputs.pop( + "space-T1w_desc-tight_brain_mask" + ) return (wf, outputs) @@ -2101,7 +2103,7 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "space-T1w_desc-brain_mask": { "Description": "Brain mask extracted using FreeSurfer-BET-Loose method", "Method": "FreeSurfer-BET-Loose", - "Threshold": "loose" + "Threshold": "loose", } }, ) @@ -2109,7 +2111,9 @@ def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) # Convert the loose brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop("space-T1w_desc-loose_brain_mask") + outputs["space-T1w_desc-brain_mask"] = outputs.pop( + "space-T1w_desc-loose_brain_mask" + ) return (wf, outputs) From 48826ecfaa43b2a84af1d9b368db760302c35d37 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 23 Jul 2025 16:56:18 -0400 Subject: [PATCH 426/507] Fixing freesurfer-abcd brain masking --- CPAC/anat_preproc/anat_preproc.py | 35 +++++-------------------------- CPAC/anat_preproc/utils.py | 4 ++-- 2 files changed, 7 insertions(+), 32 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 51f2b28107..9bf2900e2f 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1128,37 +1128,12 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): name=f"wmparc_to_nifti_{pipe_num}", ) - # Register wmparc file if ingressing FreeSurfer data - if strat_pool.check_rpool("pipeline-fs_xfm"): - wmparc_to_native = 
pe.Node( - Function( - input_names=["source_file", "target_file", "xfm", "out_file"], - output_names=["transformed_file"], - function=normalize_wmparc, - ), - name=f"wmparc_to_native_{pipe_num}", - ) - - wmparc_to_native.inputs.out_file = "wmparc_warped.mgz" - - node, out = strat_pool.get_data("pipeline-fs_wmparc") - wf.connect(node, out, wmparc_to_native, "source_file") - - node, out = strat_pool.get_data("pipeline-fs_raw-average") - wf.connect(node, out, wmparc_to_native, "target_file") - - node, out = strat_pool.get_data("pipeline-fs_xfm") - wf.connect(node, out, wmparc_to_native, "xfm") - - wf.connect(wmparc_to_native, "transformed_file", wmparc_to_nifti, "in_file") - - else: - node, out = strat_pool.get_data("pipeline-fs_wmparc") - wf.connect(node, out, wmparc_to_nifti, "in_file") + node, out = strat_pool.get_data("pipeline-fs_wmparc") + wf.connect(node, out, wmparc_to_nifti, "in_file") wmparc_to_nifti.inputs.args = "-rt nearest" - node, out = strat_pool.get_data("desc-preproc_T1w") + node, out = strat_pool.get_data("desc-restore_T1w") wf.connect(node, out, wmparc_to_nifti, "reslice_like") binary_mask = pe.Node( @@ -1194,7 +1169,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(binary_filled_mask, "out_file", brain_mask_to_t1_restore, "in_file") - node, out = strat_pool.get_data("desc-preproc_T1w") + node, out = strat_pool.get_data("desc-restore_T1w") wf.connect(node, out, brain_mask_to_t1_restore, "ref_file") outputs = {"space-T1w_desc-brain_mask": (brain_mask_to_t1_restore, "out_file")} @@ -2028,7 +2003,7 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): option_key=["anatomical_preproc", "brain_extraction", "using"], option_val="FreeSurfer-ABCD", inputs=[ - "desc-preproc_T1w", + "desc-restore_T1w", "pipeline-fs_wmparc", "pipeline-fs_raw-average", "pipeline-fs_xfm", diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py index a494ebceda..2cff1e9148 100644 --- a/CPAC/anat_preproc/utils.py +++ b/CPAC/anat_preproc/utils.py @@ -487,7 +487,7 @@ def mri_convert(in_file, reslice_like=None, out_file=None, args=None): import os if out_file is None: - out_file = in_file.replace(".mgz", ".nii.gz") + out_file = os.path.join(os.getcwd(), os.path.basename(in_file).replace(".mgz", ".nii.gz")) cmd = "mri_convert %s %s" % (in_file, out_file) @@ -525,7 +525,7 @@ def mri_convert_reorient(in_file, orientation, out_file=None): import os if out_file is None: - out_file = in_file.split(".")[0] + "_reoriented.mgz" + out_file = os.path.join(os.getcwd(), os.path.basename(in_file).split(".")[0] + "_reoriented.mgz") cmd = "mri_convert %s %s --out_orientation %s" % (in_file, out_file, orientation) From 55b14e2f581c43d4270ad7ffe12c74d6bf94a5aa Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 23 Jul 2025 17:06:49 -0400 Subject: [PATCH 427/507] removing pipeline-fs_xfm --- CPAC/anat_preproc/anat_preproc.py | 2 -- CPAC/pipeline/engine.py | 1 - 2 files changed, 3 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 9bf2900e2f..908eddf45c 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -2006,7 +2006,6 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): "desc-restore_T1w", "pipeline-fs_wmparc", "pipeline-fs_raw-average", - "pipeline-fs_xfm", "freesurfer-subject-dir", ], outputs=["space-T1w_desc-brain_mask"], @@ -2053,7 +2052,6 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, 
opt=None):
         "desc-preproc_T1w",
         "pipeline-fs_wmparc",
         "pipeline-fs_raw-average",
-        "pipeline-fs_xfm",
         "freesurfer-subject-dir",
     ],
     outputs=["space-T1w_desc-acpcbrain_mask"],
diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index ac6e0cfe4a..93e5cd7981 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -2062,7 +2062,6 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id):
         "pipeline-fs_hemi-R_desc-surfaceMap_volume": "surf/rh.volume",
         "pipeline-fs_hemi-L_desc-surfaceMesh_white": "surf/lh.white",
         "pipeline-fs_hemi-R_desc-surfaceMesh_white": "surf/rh.white",
-        "pipeline-fs_xfm": "mri/transforms/talairach.lta",
     }
 
     for key, outfile in recon_outs.items():

From 16650a1278d6f11269076d1cb9d90de8eed78e09 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 23 Jul 2025 21:07:22 +0000
Subject: [PATCH 428/507] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 CPAC/anat_preproc/utils.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py
index 2cff1e9148..63a20a4881 100644
--- a/CPAC/anat_preproc/utils.py
+++ b/CPAC/anat_preproc/utils.py
@@ -487,7 +487,9 @@ def mri_convert(in_file, reslice_like=None, out_file=None, args=None):
     import os
 
     if out_file is None:
-        out_file = os.path.join(os.getcwd(), os.path.basename(in_file).replace(".mgz", ".nii.gz"))
+        out_file = os.path.join(
+            os.getcwd(), os.path.basename(in_file).replace(".mgz", ".nii.gz")
+        )
 
     cmd = "mri_convert %s %s" % (in_file, out_file)
 
@@ -525,7 +527,9 @@ def mri_convert_reorient(in_file, orientation, out_file=None):
     import os
 
     if out_file is None:
-        out_file = os.path.join(os.getcwd(), os.path.basename(in_file).split(".")[0] + "_reoriented.mgz")
+        out_file = os.path.join(
+            os.getcwd(), os.path.basename(in_file).split(".")[0] + "_reoriented.mgz"
+        )
 
     cmd = "mri_convert %s %s --out_orientation %s" % (in_file, out_file, orientation)

From c171017e2c1af396e6e4f7aa81b8d84fd4639964 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Thu, 24 Jul 2025 12:54:11 -0400
Subject: [PATCH 429/507] adding to change log

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a3a965b2c3..83d43e7704 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -69,6 +69,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1).
 - A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off.
 - A bug in `ideal_bandpass` where the frequency mask was incorrectly applied, which caused filter to fail in certain cases.
+- `FreeSurfer-ABCD` brain masking strategy to create the mask as per the original DCAN script.
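[Editor's note] PATCH 426 and PATCH 428 above change the default `out_file` in the `mri_convert` helpers from a path derived from `in_file` to one rooted in the current working directory. A minimal sketch of the pattern, assuming only the standard library (`default_out_file` is an illustrative name, not part of C-PAC's API):

    import os

    def default_out_file(in_file: str) -> str:
        """Default the converted NIfTI to the current working directory
        instead of alongside the (possibly read-only) input .mgz file."""
        # At run time, Nipype executes each node in its own scratch
        # directory, so os.getcwd() points at a writable location.
        return os.path.join(
            os.getcwd(), os.path.basename(in_file).replace(".mgz", ".nii.gz")
        )

    # Usage, e.g. with cwd == /scratch/wmparc_to_nifti_1:
    #   default_out_file("/mounted/freesurfer/sub-01/mri/wmparc.mgz")
    #   returns "/scratch/wmparc_to_nifti_1/wmparc.nii.gz"

Deriving the output from the input path, as the pre-fix code did, writes next to the input and fails when the ingressed FreeSurfer directory is mounted read-only.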
### Upgraded dependencies From 596a7f025d34b0e9c3dfd454caf50faa0390584d Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 24 Jul 2025 13:45:03 -0400 Subject: [PATCH 430/507] =?UTF-8?q?:arrow=5Fup:=20Upgrade=20coverage=20@?= =?UTF-8?q?=20=E2=89=A5=207.10.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev/circleci_data/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/circleci_data/requirements.txt b/dev/circleci_data/requirements.txt index b59c3413be..576c974897 100644 --- a/dev/circleci_data/requirements.txt +++ b/dev/circleci_data/requirements.txt @@ -1,4 +1,4 @@ -coverage +coverage >= 7.10.0 GitPython pytest pytest_bdd From 8adf7f52c26c2ca5f44e9804cca3bb4119bf4753 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 24 Jul 2025 14:25:16 -0400 Subject: [PATCH 431/507] adding fallback option desc-preproc_T1w --- CPAC/anat_preproc/anat_preproc.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 908eddf45c..6b3892709d 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1133,7 +1133,10 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wmparc_to_nifti.inputs.args = "-rt nearest" - node, out = strat_pool.get_data("desc-restore_T1w") + if strat_pool.check_rpool("desc-restore_T1w"): + node, out = strat_pool.get_data("desc-restore_T1w") + else: + node, out = strat_pool.get_data("desc-preproc_T1w") wf.connect(node, out, wmparc_to_nifti, "reslice_like") binary_mask = pe.Node( @@ -1169,7 +1172,10 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(binary_filled_mask, "out_file", brain_mask_to_t1_restore, "in_file") - node, out = strat_pool.get_data("desc-restore_T1w") + if strat_pool.check_rpool("desc-restore_T1w"): + node, out = strat_pool.get_data("desc-restore_T1w") + else: + node, out = strat_pool.get_data("desc-preproc_T1w") wf.connect(node, out, brain_mask_to_t1_restore, "ref_file") outputs = {"space-T1w_desc-brain_mask": (brain_mask_to_t1_restore, "out_file")} @@ -2003,7 +2009,7 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): option_key=["anatomical_preproc", "brain_extraction", "using"], option_val="FreeSurfer-ABCD", inputs=[ - "desc-restore_T1w", + ["desc-restore_T1w", "desc-preproc_T1w"], "pipeline-fs_wmparc", "pipeline-fs_raw-average", "freesurfer-subject-dir", @@ -2049,7 +2055,7 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): option_key=["anatomical_preproc", "brain_extraction", "using"], option_val="FreeSurfer-ABCD", inputs=[ - "desc-preproc_T1w", + ["desc-restore_T1w", "desc-preproc_T1w"], "pipeline-fs_wmparc", "pipeline-fs_raw-average", "freesurfer-subject-dir", From 568eb0865c43877ab3fa7b8c8dad154dc502dce9 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 25 Jul 2025 17:22:54 -0400 Subject: [PATCH 432/507] Update CPAC/anat_preproc/anat_preproc.py Co-authored-by: Jon Cluce --- CPAC/anat_preproc/anat_preproc.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 6b3892709d..7faef7d65a 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1172,10 +1172,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): 
wf.connect(binary_filled_mask, "out_file", brain_mask_to_t1_restore, "in_file") - if strat_pool.check_rpool("desc-restore_T1w"): - node, out = strat_pool.get_data("desc-restore_T1w") - else: - node, out = strat_pool.get_data("desc-preproc_T1w") + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) wf.connect(node, out, brain_mask_to_t1_restore, "ref_file") outputs = {"space-T1w_desc-brain_mask": (brain_mask_to_t1_restore, "out_file")} From 5cb7a57089f9632f5043690ffece9c7b53b431d8 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Fri, 25 Jul 2025 17:23:02 -0400 Subject: [PATCH 433/507] Update CPAC/anat_preproc/anat_preproc.py Co-authored-by: Jon Cluce --- CPAC/anat_preproc/anat_preproc.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 7faef7d65a..c1d144504f 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1133,10 +1133,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wmparc_to_nifti.inputs.args = "-rt nearest" - if strat_pool.check_rpool("desc-restore_T1w"): - node, out = strat_pool.get_data("desc-restore_T1w") - else: - node, out = strat_pool.get_data("desc-preproc_T1w") + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) wf.connect(node, out, wmparc_to_nifti, "reslice_like") binary_mask = pe.Node( From 5c610c32132dbd876a150716ac69d34220ce36c3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 28 Jul 2025 11:21:32 -0400 Subject: [PATCH 434/507] :arrow_up: Upgrade coverage @7.10.1 --- dev/circleci_data/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/circleci_data/requirements.txt b/dev/circleci_data/requirements.txt index 576c974897..fdd988a669 100644 --- a/dev/circleci_data/requirements.txt +++ b/dev/circleci_data/requirements.txt @@ -1,4 +1,4 @@ -coverage >= 7.10.0 +coverage >= 7.10.1 GitPython pytest pytest_bdd From 14f2da6a8e86e117b660a3d132b85afc882799c8 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 29 Jul 2025 15:35:26 -0400 Subject: [PATCH 435/507] :necktie: Make XCPQC inputs optional --- .../pipeline/nipype_pipeline_engine/engine.py | 18 +- CPAC/qc/xcp.py | 382 +++++++++--------- 2 files changed, 205 insertions(+), 195 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 508bc03378..94aacd123c 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -54,7 +54,7 @@ from inspect import Parameter, Signature, signature import os import re -from typing import Any, ClassVar, Optional +from typing import Any, ClassVar, Optional, TYPE_CHECKING from numpy import prod from traits.api import List as TraitListObject @@ -76,6 +76,10 @@ from CPAC.utils.monitoring import getLogger, WFLOGGER +if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool + + # set global default mem_gb DEFAULT_MEM_GB = 2.0 UNDEFINED_SIZE = (42, 42, 42, 1200) @@ -762,6 +766,18 @@ def write_hierarchical_dotfile( else: WFLOGGER.info(dotstr) + def connect_optional( + self, + source_resource_pool: "ResourcePool", + source_resource: str | list[str], + dest: pe.Node, + dest_input: str, + ) -> None: + """Connect optional inputs to a workflow.""" + if source_resource_pool.check_rpool(source_resource): + node, out = source_resource_pool.get_data(source_resource) + self.connect(node, out, 
dest, dest_input) + def get_data_size(filepath, mode="xyzt"): """Return the size of a functional image (x * y * z * t). diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index 1a316717cd..3ee7db7412 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -77,11 +77,12 @@ from io import BufferedReader import os import re +from typing import Any, Optional from bids.layout import parse_file_entities import numpy as np import pandas as pd -import nibabel as nib +from nibabel import load as nib_load # type: ignore[reportPrivateImportUsage] from nipype.interfaces import afni, fsl from CPAC.generate_motion_statistics.generate_motion_statistics import ( @@ -89,6 +90,7 @@ ImageTo1D, ) from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.engine import ResourcePool from CPAC.pipeline.nodeblock import nodeblock from CPAC.qc.qcmetrics import regisQ from CPAC.utils.interfaces.function import Function @@ -101,40 +103,27 @@ ] -def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): +def _connect_motion( + wf: pe.Workflow, strat_pool: ResourcePool, qc_file: pe.Node, pipe_num: int +) -> pe.Workflow: """ Connect the motion metrics to the workflow. Parameters ---------- - wf : nipype.pipeline.engine.Workflow + wf The workflow to connect the motion metrics to. - nodes : dict - Dictionary of nodes already collected from the strategy pool. - - strat_pool : CPAC.pipeline.engine.ResourcePool + strat_pool The current strategy pool. - qc_file : nipype.pipeline.engine.Node + qc_file A function node with the function ``generate_xcp_qc``. - - pipe_num : int - - Returns - ------- - wf : nipype.pipeline.engine.Workflow """ # pylint: disable=invalid-name, too-many-arguments - try: - nodes = {**nodes, "censor-indices": strat_pool.node_data("censor-indices")} - wf.connect( - nodes["censor-indices"].node, - nodes["censor-indices"].out, - qc_file, - "censor_indices", - ) - except LookupError: + if strat_pool.check_rpool("censor-indices"): + wf.connect_optional(strat_pool, "censor-indices", qc_file, "censor_indices") + else: qc_file.inputs.censor_indices = [] cal_DVARS = pe.Node( ImageTo1D(method="dvars"), @@ -153,36 +142,24 @@ def _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num): name=f"cal_DVARS_strip_{pipe_num}", ) motion_name = "desc-movementParametersUnfiltered_motion" - if motion_name not in nodes: + if not strat_pool.check_rpool(motion_name): motion_name = "desc-movementParameters_motion" + wf.connect_optional(strat_pool, "desc-preproc_bold", cal_DVARS, "in_file") + wf.connect_optional(strat_pool, "space-bold_desc-brain_mask", cal_DVARS, "mask") + wf.connect_optional(strat_pool, motion_name, qc_file, "movement_parameters") + for resource in motion_params: + if not resource.endswith("_motion"): + wf.connect_optional( + strat_pool, resource, qc_file, resource.replace("-", "_") + ) wf.connect( [ - ( - nodes["desc-preproc_bold"].node, - cal_DVARS, - [(nodes["desc-preproc_bold"].out, "in_file")], - ), - ( - nodes["space-bold_desc-brain_mask"].node, - cal_DVARS, - [(nodes["space-bold_desc-brain_mask"].out, "mask")], - ), (cal_DVARS, cal_DVARS_strip, [("out_file", "file_1D")]), ( cal_DVARS_strip, qc_file, [("out_file", "dvars_after_path"), ("out_matrix", "dvars_after")], ), - ( - nodes[motion_name].node, - qc_file, - [(nodes[motion_name].out, "movement_parameters")], - ), - *[ - (nodes[node].node, qc_file, [(nodes[node].out, node.replace("-", "_"))]) - for node in motion_params - if not node.endswith("_motion") and node in nodes - ], ] ) return wf @@ -209,20 +186,20 @@ def generate_xcp_qc( # noqa: 
PLR0913 task: str, run: str | int, desc: str, - regressors: str, - bold2t1w_mask: str, - t1w_mask: str, - bold2template_mask: str, - template_mask: str, - original_func: str, - final_func: str, - movement_parameters: str, - dvars: str, - censor_indices: list[int], - framewise_displacement_jenkinson: str, - dvars_after: np.ndarray, - dvars_after_path: str, - template: str, + regressors: Optional[str], + bold2t1w_mask: Optional[str], + t1w_mask: Optional[str], + bold2template_mask: Optional[str], + template_mask: Optional[str], + original_func: Optional[str], + final_func: Optional[str], + movement_parameters: Optional[str], + dvars: Optional[str], + censor_indices: Optional[list[int]], + framewise_displacement_jenkinson: Optional[str], + dvars_after: Optional[np.ndarray], + dvars_after_path: Optional[str], + template: Optional[str], ) -> str: """ Generate an RBC-style QC CSV. @@ -292,90 +269,131 @@ def generate_xcp_qc( # noqa: PLR0913 str path to space-template_desc-xcp_quality TSV """ - columns = ( - "sub,ses,task,run,desc,regressors,space,meanFD,relMeansRMSMotion," - "relMaxRMSMotion,meanDVInit,meanDVFinal,nVolCensored,nVolsRemoved," - "motionDVCorrInit,motionDVCorrFinal,coregDice,coregJaccard," - "coregCrossCorr,coregCoverage,normDice,normJaccard,normCrossCorr," - "normCoverage".split(",") - ) - images = { - "original_func": nib.load(original_func), - "final_func": nib.load(final_func), + key: nib_load(image) + for key, image in [("original_func", original_func), ("final_func", final_func)] + if image } - # `sub` through `space` - from_bids = { - "sub": sub, - "ses": ses, - "task": task, - "run": run, - "desc": desc, - "regressors": regressors, - "space": os.path.basename(template).split(".", 1)[0].split("_", 1)[0], - } - if from_bids["space"].startswith("tpl-"): - from_bids["space"] = from_bids["space"][4:] - - # `nVolCensored` & `nVolsRemoved` - n_vols_censored = len(censor_indices) if censor_indices is not None else "unknown" - shape_params = { - "nVolCensored": n_vols_censored, - "nVolsRemoved": images["original_func"].shape[3] - - images["final_func"].shape[3], - } + qc_dict: dict[str, Any] = {} + """Quality control dictionary to be converted to a DataFrame.""" - if isinstance(final_func, BufferedReader): - final_func = final_func.name - qc_filepath = os.path.join(os.getcwd(), "xcpqc.tsv") + columns: list[str] = [] + """Header for the quality control DataFrame.""" - desc_span = re.search(r"_desc-.*_", final_func) - if desc_span: - desc_span = desc_span.span() - final_func = "_".join([final_func[: desc_span[0]], final_func[desc_span[1] :]]) - del desc_span - - # `meanFD (Jenkinson)` - power_params = {"meanFD": np.mean(np.loadtxt(framewise_displacement_jenkinson))} - - # `relMeansRMSMotion` & `relMaxRMSMotion` - mot = np.genfromtxt(movement_parameters).T - # Relative RMS of translation - rms = np.sqrt(mot[3] ** 2 + mot[4] ** 2 + mot[5] ** 2) - rms_params = {"relMeansRMSMotion": [np.mean(rms)], "relMaxRMSMotion": [np.max(rms)]} - - # `meanDVInit` & `meanDVFinal` - meanDV = {"meanDVInit": np.mean(np.loadtxt(dvars))} - try: - meanDV["motionDVCorrInit"] = dvcorr(dvars, framewise_displacement_jenkinson) - except ValueError as value_error: - meanDV["motionDVCorrInit"] = f"ValueError({value_error!s})" - meanDV["meanDVFinal"] = np.mean(dvars_after) - try: - meanDV["motionDVCorrFinal"] = dvcorr( - dvars_after_path, framewise_displacement_jenkinson + # `sub` through `space` + from_bids: dict[str, Any] = { + _k: _v + for _k, _v in { + "sub": sub, + "ses": ses, + "task": task, + "run": run, + 
"desc": desc, + "regressors": regressors, + "space": os.path.basename(template).split(".", 1)[0].split("_", 1)[0] + if template + else None, + }.items() + if _v is not None + } + columns.extend(["sub", "ses", "task", "run", "desc", "regressors"]) + if from_bids["space"] is not None: + if from_bids["space"].startswith("tpl-"): + from_bids["space"] = from_bids["space"][4:] + columns.append("space") + qc_dict = {**qc_dict, **from_bids} + + if framewise_displacement_jenkinson is not None: + # `meanFD (Jenkinson)` + power_params = {"meanFD": np.mean(np.loadtxt(framewise_displacement_jenkinson))} + qc_dict = {**qc_dict, **power_params} + columns.append("meanFD") + + if movement_parameters is not None: + # `relMeansRMSMotion` & `relMaxRMSMotion` + mot = np.genfromtxt(movement_parameters).T + # Relative RMS of translation + rms = np.sqrt(mot[3] ** 2 + mot[4] ** 2 + mot[5] ** 2) + rms_params = { + "relMeansRMSMotion": [np.mean(rms)], + "relMaxRMSMotion": [np.max(rms)], + } + qc_dict = {**qc_dict, **rms_params} + columns.extend(list(rms_params.keys())) + + if dvars is not None: + # `meanDVInit` & `meanDVFinal` + meanDV = {"meanDVInit": np.mean(np.loadtxt(dvars))} + try: + meanDV["motionDVCorrInit"] = dvcorr(dvars, framewise_displacement_jenkinson) + except ValueError as value_error: + meanDV["motionDVCorrInit"] = f"ValueError({value_error!s})" + meanDV["meanDVFinal"] = np.mean(dvars_after) + try: + meanDV["motionDVCorrFinal"] = dvcorr( + dvars_after_path, framewise_displacement_jenkinson + ) + except ValueError as value_error: + meanDV["motionDVCorrFinal"] = f"ValueError({value_error!s})" + qc_dict = {**qc_dict, **meanDV} + columns.extend(list(meanDV.keys())) + + if censor_indices is not None: + # `nVolCensored` & `nVolsRemoved` + n_vols_censored = ( + len(censor_indices) if censor_indices is not None else "unknown" ) - except ValueError as value_error: - meanDV["motionDVCorrFinal"] = f"ValueError({value_error!s})" - - # Overlap - overlap_params = regisQ( - bold2t1w_mask=bold2t1w_mask, - t1w_mask=t1w_mask, - bold2template_mask=bold2template_mask, - template_mask=template_mask, - ) + shape_params = { + "nVolCensored": n_vols_censored, + "nVolsRemoved": images["original_func"].shape[3] + - images["final_func"].shape[3], + } + qc_dict = {**qc_dict, **shape_params} + if "motionDVCorrFinal" in columns: + columns.insert(-2, "meanDVInit") + columns.insert(-2, "meanDVFinal") + columns.extend(["motionDVCorrInit", "motionDVCorrFinal"]) + else: + columns.extend(list(shape_params.keys())) + + if final_func is not None: + if isinstance(final_func, BufferedReader): + final_func = final_func.name + + desc_span = re.search(r"_desc-.*_", final_func) if final_func else None + if desc_span: + desc_span = desc_span.span() + assert final_func is not None + final_func = "_".join( + [final_func[: desc_span[0]], final_func[desc_span[1] :]] + ) + del desc_span + + if all( + _var is not None + for _var in [bold2t1w_mask, t1w_mask, bold2template_mask, template_mask] + ): + # `coregDice`, `coregJaccard`, `coregCrossCorr`, `coregCoverage` + coreg_params = regisQ( + bold2t1w_mask=bold2t1w_mask, + t1w_mask=t1w_mask, + bold2template_mask=bold2template_mask, + template_mask=template_mask, + ) + # Overlap + overlap_params = regisQ( + bold2t1w_mask=bold2t1w_mask, + t1w_mask=t1w_mask, + bold2template_mask=bold2template_mask, + template_mask=template_mask, + ) + qc_dict = {**qc_dict, **coreg_params, **overlap_params} + columns.extend([*list(coreg_params.keys()), *list(overlap_params.keys())]) - qc_dict = { - **from_bids, - 
**power_params, - **rms_params, - **shape_params, - **overlap_params, - **meanDV, - } df = pd.DataFrame(qc_dict, columns=columns) + + qc_filepath = os.path.join(os.getcwd(), "xcpqc.tsv") df.to_csv(qc_filepath, sep="\t", index=False) return qc_filepath @@ -543,31 +561,6 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): name=f"binarize_bold_to_T1w_mask_{pipe_num}", op_string="-bin ", ) - nodes = { - key: strat_pool.node_data(key) - for key in [ - "bold", - "desc-preproc_bold", - "max-displacement", - "scan", - "space-bold_desc-brain_mask", - "space-T1w_desc-brain_mask", - "space-T1w_sbref", - "space-template_desc-preproc_bold", - "subject", - *motion_params, - ] - if strat_pool.check_rpool(key) - } - nodes["bold2template_mask"] = strat_pool.node_data( - ["space-template_desc-bold_mask", "space-EPItemplate_desc-bold_mask"] - ) - nodes["template_mask"] = strat_pool.node_data( - ["T1w-brain-template-mask", "EPI-template-mask"] - ) - nodes["template"] = strat_pool.node_data( - ["T1w-brain-template-funcreg", "EPI-brain-template-funcreg"] - ) resample_bold_mask_to_template = pe.Node( afni.Resample(), name=f"resample_bold_mask_to_anat_res_{pipe_num}", @@ -575,44 +568,45 @@ def qc_xcp(wf, cfg, strat_pool, pipe_num, opt=None): mem_x=(0.0115, "in_file", "t"), ) resample_bold_mask_to_template.inputs.outputtype = "NIFTI_GZ" - wf = _connect_motion(wf, nodes, strat_pool, qc_file, pipe_num=pipe_num) + wf: pe.Workflow = _connect_motion(wf, strat_pool, qc_file, pipe_num=pipe_num) + if not hasattr(wf, "connect_optional"): + setattr(wf, "connect_optional", pe.Workflow.connect_optional) + + for key in ["subject", "scan"]: + wf.connect_optional(strat_pool, key, bids_info, key) + wf.connect_optional(strat_pool, "space-T1w_sbref", bold_to_T1w_mask, "in_file") + wf.connect_optional(strat_pool, "space-T1w_desc-brain_mask", qc_file, "t1w_mask") + wf.connect_optional( + strat_pool, + ["T1w-brain-template-mask", "EPI-template-mask"], + qc_file, + "template_mask", + ) + wf.connect_optional( + strat_pool, + ["T1w-brain-template-mask", "EPI-template-mask"], + resample_bold_mask_to_template, + "master", + ) + wf.connect_optional(strat_pool, "bold", qc_file, "original_func") + wf.connect_optional( + strat_pool, "space-template_desc-preproc_bold", qc_file, "final_func" + ) + wf.connect_optional( + strat_pool, + ["T1w-brain-template-funcreg", "EPI-brain-template-funcreg"], + qc_file, + "template", + ) + wf.connect_optional( + strat_pool, + ["space-template_desc-bold_mask", "space-EPItemplate_desc-bold_mask"], + resample_bold_mask_to_template, + "in_file", + ) wf.connect( [ - (nodes["subject"].node, bids_info, [(nodes["subject"].out, "subject")]), - (nodes["scan"].node, bids_info, [(nodes["scan"].out, "scan")]), - ( - nodes["space-T1w_sbref"].node, - bold_to_T1w_mask, - [(nodes["space-T1w_sbref"].out, "in_file")], - ), - ( - nodes["space-T1w_desc-brain_mask"].node, - qc_file, - [(nodes["space-T1w_desc-brain_mask"].out, "t1w_mask")], - ), (bold_to_T1w_mask, qc_file, [("out_file", "bold2t1w_mask")]), - ( - nodes["template_mask"].node, - qc_file, - [(nodes["template_mask"].out, "template_mask")], - ), - (nodes["bold"].node, qc_file, [(nodes["bold"].out, "original_func")]), - ( - nodes["space-template_desc-preproc_bold"].node, - qc_file, - [(nodes["space-template_desc-preproc_bold"].out, "final_func")], - ), - (nodes["template"].node, qc_file, [(nodes["template"].out, "template")]), - ( - nodes["template_mask"].node, - resample_bold_mask_to_template, - [(nodes["template_mask"].out, "master")], - ), - ( - 
nodes["bold2template_mask"].node, - resample_bold_mask_to_template, - [(nodes["bold2template_mask"].out, "in_file")], - ), ( resample_bold_mask_to_template, qc_file, From 36e756259eb35767bb6bf10916d1579b495700c6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 30 Jul 2025 13:55:03 -0400 Subject: [PATCH 436/507] :necktie: Group `ICA_AROMA_FSLreg` inputs --- CPAC/nuisance/nuisance.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 9504f0a3b7..beb84700b1 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2023,9 +2023,11 @@ def filtering_bold_and_regressors( config=["nuisance_corrections", "1-ICA-AROMA"], switch=["run"], inputs=[ - "desc-preproc_bold", - "from-bold_to-T1w_mode-image_desc-linear_xfm", - "from-T1w_to-template_mode-image_xfm", + ( + "desc-preproc_bold", + "from-bold_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-template_mode-image_xfm", + ), ], outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) From 970afa5e908b1bff06379d441c2983b3e3104555 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 30 Jul 2025 14:23:43 -0400 Subject: [PATCH 437/507] :rewind: Revert ":necktie: Group `ICA_AROMA_FSLreg` inputs" This reverts commit 36e756259eb35767bb6bf10916d1579b495700c6. --- CPAC/nuisance/nuisance.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index beb84700b1..9504f0a3b7 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -2023,11 +2023,9 @@ def filtering_bold_and_regressors( config=["nuisance_corrections", "1-ICA-AROMA"], switch=["run"], inputs=[ - ( - "desc-preproc_bold", - "from-bold_to-T1w_mode-image_desc-linear_xfm", - "from-T1w_to-template_mode-image_xfm", - ), + "desc-preproc_bold", + "from-bold_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-template_mode-image_xfm", ], outputs=["desc-preproc_bold", "desc-cleaned_bold"], ) From 930022bc090b1aeb0d07cd20fe2e40678aae0aee Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 30 Jul 2025 15:37:03 -0400 Subject: [PATCH 438/507] making only the first arg as required --- CPAC/utils/datasource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index a23f373487..4c9e4afb69 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -449,7 +449,7 @@ def calc_delta_te_and_asym_ratio( return deltaTE, ees_asym_ratio -def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None): +def gather_echo_times(echotime_1, echotime_2=None, echotime_3=None, echotime_4=None): """Gather the echo times from the field map data.""" echotime_list = [echotime_1, echotime_2, echotime_3, echotime_4] echotime_list = list(filter(lambda item: item is not None, echotime_list)) From 0a0e30669b2a40864cf663ac320f6cffba0a8378 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 31 Jul 2025 17:31:58 -0400 Subject: [PATCH 439/507] only calculate delta_ratio for EchoTime if phasediff type fmap --- CPAC/utils/datasource.py | 79 +++++++++++++++++++++------------------- 1 file changed, 41 insertions(+), 38 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 4c9e4afb69..1c8b86c7c1 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -449,7 +449,7 @@ def calc_delta_te_and_asym_ratio( return deltaTE, ees_asym_ratio -def gather_echo_times(echotime_1, 
echotime_2=None, echotime_3=None, echotime_4=None): +def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None): """Gather the echo times from the field map data.""" echotime_list = [echotime_1, echotime_2, echotime_3, echotime_4] echotime_list = list(filter(lambda item: item is not None, echotime_list)) @@ -693,23 +693,16 @@ def ingress_func_metadata( "fmap_readout_ingress", ) - if "phase" in key or "mag" in key: + # Only consider it "diff" if it's not a spin-echo or EPI-based fmap + if ( + ("phase" in key or "phasediff" in key) + and not any(style in key.lower() for style in ["spin", "se", "epi"]) + ): diff = True - if re.match("epi_[AP]{2}", orig_key): blip = True if diff: - calc_delta_ratio = pe.Node( - Function( - input_names=["effective_echo_spacing", "echo_times"], - output_names=["deltaTE", "ees_asym_ratio"], - function=calc_delta_te_and_asym_ratio, - imports=["from typing import Optional"], - ), - name=f"diff_distcor_calc_delta{name_suffix}", - ) - gather_echoes = pe.Node( Function( input_names=[ @@ -733,7 +726,42 @@ def ingress_func_metadata( except KeyError: pass + calc_delta_ratio = pe.Node( + Function( + input_names=["effective_echo_spacing", "echo_times"], + output_names=["deltaTE", "ees_asym_ratio"], + function=calc_delta_te_and_asym_ratio, + imports=["from typing import Optional"], + ), + name=f"diff_distcor_calc_delta{name_suffix}", + ) + wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") + + # Connect EffectiveEchoSpacing from functional metadata + rpool.set_data( + "effectiveEchoSpacing", + scan_params, + "effective_echo_spacing", + {}, + "", + "func_metadata_ingress", + ) + node, out_file = rpool.get("effectiveEchoSpacing")[ + "['effectiveEchoSpacing:func_metadata_ingress']" + ]["data"] + wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") + rpool.set_data( + "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" + ) + rpool.set_data( + "ees-asym-ratio", + calc_delta_ratio, + "ees_asym_ratio", + {}, + "", + "ees_asym_ratio_ingress", + ) # Add in nodes to get parameters from configuration file # a node which checks if scan_parameters are present for each scan @@ -805,31 +833,6 @@ def ingress_func_metadata( "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress" ) - if diff: - # Connect EffectiveEchoSpacing from functional metadata - rpool.set_data( - "effectiveEchoSpacing", - scan_params, - "effective_echo_spacing", - {}, - "", - "func_metadata_ingress", - ) - node, out_file = rpool.get("effectiveEchoSpacing")[ - "['effectiveEchoSpacing:func_metadata_ingress']" - ]["data"] - wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") - rpool.set_data( - "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" - ) - rpool.set_data( - "ees-asym-ratio", - calc_delta_ratio, - "ees_asym_ratio", - {}, - "", - "ees_asym_ratio_ingress", - ) return wf, rpool, diff, blip, fmap_rp_list From 2d871cc68303ea382b1b5d25857bd9ad5c7ef11b Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 1 Aug 2025 15:03:06 -0400 Subject: [PATCH 440/507] adding a better way to determine fmaps and then set resources accordingly --- CPAC/utils/datasource.py | 348 ++++++++++++++++------------ CPAC/utils/tests/slurm-33950418.out | 0 CPAC/utils/tests/test_datasource.py | 95 ++++++++ 3 files changed, 300 insertions(+), 143 deletions(-) create mode 100644 CPAC/utils/tests/slurm-33950418.out diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 
1c8b86c7c1..45f153d5fa 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -398,7 +398,12 @@ def get_fmap_phasediff_metadata(data_config_scan_params): dwell_time = data_config_scan_params.get("DwellTime") pe_direction = data_config_scan_params.get("PhaseEncodingDirection") total_readout = data_config_scan_params.get("TotalReadoutTime") + if "EffectiveEchoSpacing" in data_config_scan_params: + effective_echo_spacing = data_config_scan_params.get("EffectiveEchoSpacing") + else: + effective_echo_spacing = None + fmap_type = get_fmap_type(data_config_scan_params) return ( dwell_time, pe_direction, @@ -406,6 +411,8 @@ def get_fmap_phasediff_metadata(data_config_scan_params): echo_time, echo_time_one, echo_time_two, + effective_echo_spacing, + fmap_type, ) @@ -542,6 +549,67 @@ def match_epi_fmaps_function_node(name: str = "match_epi_fmaps"): ) +def get_fmap_type(metadata): + """ Determine the type of field map from metadata. + + reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image + + Parameters + ---------- + metadata : dict or str + Metadata dictionary or path to a JSON file containing metadata. + + Returns + ------- + str or None + Returns the type of field map as a string: + - "phasediff" for phase difference maps with two echo times + - "phase" for single echo phase maps + - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla + - "epi" for EPI field maps with phase encoding direction + """ + + if not isinstance(metadata, dict): + if isinstance(metadata, str) and ".json" in metadata: + import json + try: + with open(metadata, "r", encoding="utf-8") as f: + metadata = json.load(f) + except (FileNotFoundError, json.JSONDecodeError): + return None + else: + return None + + # Check for required BIDS fields only + match ( + "EchoTime1" in metadata, + "EchoTime2" in metadata, + "EchoTime" in metadata, + "Units" in metadata, + "PhaseEncodingDirection" in metadata + ): + case (True, True, _, _, _): + # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) + return "phasediff" + case (False, False, True, _, _): + # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2) + return "phase" + case (_, _, _, True, _): + # Case 3: Direct field mapping (REQUIRED: Units) + units = metadata["Units"].lower() + if units in ["hz", "rad/s", "t", "tesla", "hertz"]: + return "fieldmap" + case (_, _, _, _, True): + # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) + pe_dir = metadata["PhaseEncodingDirection"] + if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]: + return "epi" + case _: + return None + + return None + + def ingress_func_metadata( wf, cfg, @@ -557,11 +625,88 @@ def ingress_func_metadata( for suffix_part in (unique_id, num_strat): if suffix_part is not None: name_suffix += f"_{suffix_part}" - # Grab field maps + + scan_params = pe.Node( + Function( + input_names=[ + "data_config_scan_params", + "subject_id", + "scan", + "pipeconfig_tr", + "pipeconfig_tpattern", + "pipeconfig_start_indx", + "pipeconfig_stop_indx", + ], + output_names=[ + "tr", + "tpattern", + "template", + "ref_slice", + "start_indx", + "stop_indx", + "pe_direction", + "effective_echo_spacing", + ], + function=get_scan_params, + ), + name=f"bold_scan_params_{subject_id}{name_suffix}", + ) + scan_params.inputs.subject_id = subject_id + scan_params.inputs.set( + pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"], + 
pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"], + ) + + node, out = rpool.get("scan")["['scan:func_ingress']"]["data"] + wf.connect(node, out, scan_params, "scan") + + # Workaround for extracting metadata with ingress + if rpool.check_rpool("derivatives-dir"): + selectrest_json = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest_json", + ) + selectrest_json.inputs.rest_dict = sub_dict + selectrest_json.inputs.resource = "scan_parameters" + wf.connect(node, out, selectrest_json, "scan") + wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params") + else: + # wire in the scan parameter workflow + node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][ + "data" + ] + wf.connect(node, out, scan_params, "data_config_scan_params") + + # Set functional metadata in rpool + rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress") + rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress") + rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress") + rpool.set_data( + "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress" + ) + rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress") + rpool.set_data( + "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress" + ) + rpool.set_data( + "effectiveEchoSpacing", + scan_params, + "effective_echo_spacing", + {}, + "", + "func_metadata_ingress", + ) + diff = False blip = False fmap_rp_list = [] fmap_TE_list = [] + if "fmap" in sub_dict: second = False for orig_key in sub_dict["fmap"]: @@ -594,7 +739,6 @@ def ingress_func_metadata( fmap_rp_list.append(key) - get_fmap_metadata_imports = ["import json"] get_fmap_metadata = pe.Node( Function( input_names=["data_config_scan_params"], @@ -605,9 +749,11 @@ def ingress_func_metadata( "echo_time", "echo_time_one", "echo_time_two", + "effective_echo_spacing", + "fmap_type", ], function=get_fmap_phasediff_metadata, - imports=get_fmap_metadata_imports, + imports=["import json"], ), name=f"{key}_get_metadata{name_suffix}", ) @@ -619,54 +765,46 @@ def ingress_func_metadata( "data_config_scan_params", ) - if "phase" in key: - # leave it open to all three options, in case there is a - # phasediff image with either a single EchoTime field (which - # usually matches one of the magnitude EchoTimes), OR - # a phasediff with an EchoTime1 and EchoTime2 - - # at least one of these rpool keys will have a None value, - # which will be sorted out in gather_echo_times below - rpool.set_data( - f"{key}-TE", - get_fmap_metadata, - "echo_time", - {}, - "", - "fmap_TE_ingress", - ) - fmap_TE_list.append(f"{key}-TE") - - rpool.set_data( - f"{key}-TE1", - get_fmap_metadata, - "echo_time_one", - {}, - "", - "fmap_TE1_ingress", - ) - fmap_TE_list.append(f"{key}-TE1") - - rpool.set_data( - f"{key}-TE2", - get_fmap_metadata, - "echo_time_two", - {}, - "", - "fmap_TE2_ingress", - ) - fmap_TE_list.append(f"{key}-TE2") - - elif "magnitude" in key: - rpool.set_data( - f"{key}-TE", - get_fmap_metadata, - "echo_time", - {}, - "", - "fmap_TE_ingress", - ) - fmap_TE_list.append(f"{key}-TE") + # Store the fmap type output for later use + rpool.set_data( + f"{key}-fmap-type", + get_fmap_metadata, + "fmap_type", + {}, + "", + "fmap_type_ingress", + ) + + # Set echo time data - let downstream processing filter based on type + rpool.set_data( 
+ f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) + fmap_TE_list.append(f"{key}-TE") + + rpool.set_data( + f"{key}-TE1", + get_fmap_metadata, + "echo_time_one", + {}, + "", + "fmap_TE1_ingress", + ) + fmap_TE_list.append(f"{key}-TE1") + + rpool.set_data( + f"{key}-TE2", + get_fmap_metadata, + "echo_time_two", + {}, + "", + "fmap_TE2_ingress", + ) + fmap_TE_list.append(f"{key}-TE2") rpool.set_data( f"{key}-dwell", @@ -693,14 +831,15 @@ def ingress_func_metadata( "fmap_readout_ingress", ) - # Only consider it "diff" if it's not a spin-echo or EPI-based fmap - if ( - ("phase" in key or "phasediff" in key) - and not any(style in key.lower() for style in ["spin", "se", "epi"]) - ): - diff = True + # Set flags based on predictable patterns if re.match("epi_[AP]{2}", orig_key): blip = True + elif any(pattern in key.lower() for pattern in ["phase", "phasediff", "fieldmap"]): + diff = True + + # Conservative approach: if we have any fieldmaps, prepare for diff processing + if fmap_rp_list: + diff = True if diff: gather_echoes = pe.Node( @@ -718,13 +857,14 @@ def ingress_func_metadata( ) for idx, fmap_file in enumerate(fmap_TE_list, start=1): - try: - node, out_file = rpool.get(fmap_file)[ - f"['{fmap_file}:fmap_TE_ingress']" - ]["data"] - wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") - except KeyError: - pass + if idx <= 4: # Limit to 4 inputs + try: + node, out_file = rpool.get(fmap_file)[ + f"['{fmap_file}:fmap_TE_ingress']" + ]["data"] + wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") + except KeyError: + pass calc_delta_ratio = pe.Node( Function( @@ -739,18 +879,11 @@ def ingress_func_metadata( wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") # Connect EffectiveEchoSpacing from functional metadata - rpool.set_data( - "effectiveEchoSpacing", - scan_params, - "effective_echo_spacing", - {}, - "", - "func_metadata_ingress", - ) node, out_file = rpool.get("effectiveEchoSpacing")[ "['effectiveEchoSpacing:func_metadata_ingress']" ]["data"] wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") + rpool.set_data( "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" ) @@ -763,77 +896,6 @@ def ingress_func_metadata( "ees_asym_ratio_ingress", ) - # Add in nodes to get parameters from configuration file - # a node which checks if scan_parameters are present for each scan - scan_params = pe.Node( - Function( - input_names=[ - "data_config_scan_params", - "subject_id", - "scan", - "pipeconfig_tr", - "pipeconfig_tpattern", - "pipeconfig_start_indx", - "pipeconfig_stop_indx", - ], - output_names=[ - "tr", - "tpattern", - "template", - "ref_slice", - "start_indx", - "stop_indx", - "pe_direction", - "effective_echo_spacing", - ], - function=get_scan_params, - ), - name=f"bold_scan_params_{subject_id}{name_suffix}", - ) - scan_params.inputs.subject_id = subject_id - scan_params.inputs.set( - pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"], - pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"], - ) - - node, out = rpool.get("scan")["['scan:func_ingress']"]["data"] - wf.connect(node, out, scan_params, "scan") - - # Workaround for extracting metadata with ingress - if rpool.check_rpool("derivatives-dir"): - selectrest_json = pe.Node( - function.Function( - input_names=["scan", "rest_dict", "resource"], - output_names=["file_path"], - function=get_rest, - as_module=True, - ), - name="selectrest_json", - ) - selectrest_json.inputs.rest_dict = sub_dict - 
selectrest_json.inputs.resource = "scan_parameters" - wf.connect(node, out, selectrest_json, "scan") - wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params") - - else: - # wire in the scan parameter workflow - node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][ - "data" - ] - wf.connect(node, out, scan_params, "data_config_scan_params") - - rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress") - rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress") - rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress") - rpool.set_data( - "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress" - ) - rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress") - rpool.set_data( - "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress" - ) - - return wf, rpool, diff, blip, fmap_rp_list diff --git a/CPAC/utils/tests/slurm-33950418.out b/CPAC/utils/tests/slurm-33950418.out new file mode 100644 index 0000000000..e69de29bb2 diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 6e9e52c0d2..0769204706 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -28,6 +28,7 @@ from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node from CPAC.utils.test_resources import setup_test_wf from CPAC.utils.utils import PE_DIRECTION +from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node, get_fmap_type @dataclass @@ -381,3 +382,97 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: path_outputs["nipype"][direction].name == path_outputs["direct"][direction].name ) + + +@pytest.mark.parametrize( + "metadata, expected_type", + [ + # Case 1: Phase-difference map (phasediff) - REQUIRED: EchoTime1 and EchoTime2 + ({"EchoTime1": 0.00600, "EchoTime2": 0.00746}, "phasediff"), + ({"EchoTime1": 0.004, "EchoTime2": 0.006}, "phasediff"), + + # Case 2: Single phase map (phase) - REQUIRED: EchoTime + ({"EchoTime": 0.00746}, "phase"), + ({"EchoTime": 0.004}, "phase"), + + # Case 3: Direct field mapping (fieldmap) - REQUIRED: Units + ({"Units": "rad/s"}, "fieldmap"), + ({"Units": "Hz"}, "fieldmap"), + ({"Units": "hz"}, "fieldmap"), + ({"Units": "T"}, "fieldmap"), + ({"Units": "Tesla"}, "fieldmap"), + ({"Units": "hertz"}, "fieldmap"), + + # Case 4: EPI field maps (epi) - REQUIRED: PhaseEncodingDirection + ({"PhaseEncodingDirection": "j-"}, "epi"), + ({"PhaseEncodingDirection": "j"}, "epi"), + ({"PhaseEncodingDirection": "i"}, "epi"), + ({"PhaseEncodingDirection": "i-"}, "epi"), + ({"PhaseEncodingDirection": "k"}, "epi"), + ({"PhaseEncodingDirection": "k-"}, "epi"), + + # Edge cases and invalid inputs + ({}, None), # Empty metadata + ({"SomeOtherField": "value"}, None), # Irrelevant metadata + ({"Units": "invalid_unit"}, None), # Invalid units + ({"PhaseEncodingDirection": "invalid"}, None), # Invalid PE direction + ({"EchoTime1": 0.006}, None), # Only EchoTime1 without EchoTime2 + ({"EchoTime2": 0.006}, None), # Only EchoTime2 without EchoTime1 + + # Priority testing - phasediff should take precedence + ({"EchoTime1": 0.006, "EchoTime2": 0.007, "EchoTime": 0.006}, "phasediff"), + ({"EchoTime1": 0.006, "EchoTime2": 0.007, "Units": "Hz"}, "phasediff"), + ({"EchoTime1": 0.006, "EchoTime2": 0.007, "PhaseEncodingDirection": "j-"}, "phasediff"), + + # Phase should take precedence over fieldmap and epi + ({"EchoTime": 
0.006, "Units": "Hz"}, "phase"), + ({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}, "phase"), + + # Fieldmap should take precedence over epi + ({"Units": "Hz", "PhaseEncodingDirection": "j-"}, "fieldmap"), + + # Test with optional fields that might be present (but shouldn't affect detection) + ({"EchoTime1": 0.006, "EchoTime2": 0.007, "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz"}, "phasediff"), + ({"Units": "rad/s", "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz"}, "fieldmap"), + ({"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.095}, "epi"), + ] +) +def test_get_fmap_type_dict_input(metadata: dict, expected_type: str | None) -> None: + """Test `get_fmap_type` with dictionary input using only required BIDS fields.""" + result = get_fmap_type(metadata) + assert result == expected_type + + +def test_get_fmap_type_real_world_examples() -> None: + """Test `get_fmap_type` with realistic BIDS metadata examples (required fields only).""" + # Real-world phasediff example (only required fields) + phasediff_metadata = { + "EchoTime1": 0.00600, + "EchoTime2": 0.00746, + # Optional fields that might be present: + "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"] + } + assert get_fmap_type(phasediff_metadata) == "phasediff" + + # Real-world fieldmap example (only required fields) + fieldmap_metadata = { + "Units": "rad/s", + # Optional fields that might be present: + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz" + } + assert get_fmap_type(fieldmap_metadata) == "fieldmap" + + # Real-world EPI example (only required fields) + epi_metadata = { + "PhaseEncodingDirection": "j-", + # Optional fields that might be present: + "TotalReadoutTime": 0.095, + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz" + } + assert get_fmap_type(epi_metadata) == "epi" + + # Real-world phase example (only required fields) + phase_metadata = { + "EchoTime": 0.00746 + } + assert get_fmap_type(phase_metadata) == "phase" \ No newline at end of file From c2f22a47725be78df81db4796ff282d5b44a4e74 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 1 Aug 2025 19:03:38 +0000 Subject: [PATCH 441/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/utils/datasource.py | 27 +++++++------- CPAC/utils/tests/test_datasource.py | 56 +++++++++++++++++------------ 2 files changed, 48 insertions(+), 35 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 45f153d5fa..baa158dad1 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -550,15 +550,15 @@ def match_epi_fmaps_function_node(name: str = "match_epi_fmaps"): def get_fmap_type(metadata): - """ Determine the type of field map from metadata. - + """Determine the type of field map from metadata. + reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image Parameters ---------- metadata : dict or str Metadata dictionary or path to a JSON file containing metadata. 
- + Returns ------- str or None @@ -568,10 +568,11 @@ def get_fmap_type(metadata): - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla - "epi" for EPI field maps with phase encoding direction """ - + if not isinstance(metadata, dict): if isinstance(metadata, str) and ".json" in metadata: import json + try: with open(metadata, "r", encoding="utf-8") as f: metadata = json.load(f) @@ -579,14 +580,14 @@ def get_fmap_type(metadata): return None else: return None - + # Check for required BIDS fields only match ( "EchoTime1" in metadata, - "EchoTime2" in metadata, + "EchoTime2" in metadata, "EchoTime" in metadata, "Units" in metadata, - "PhaseEncodingDirection" in metadata + "PhaseEncodingDirection" in metadata, ): case (True, True, _, _, _): # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) @@ -606,7 +607,7 @@ def get_fmap_type(metadata): return "epi" case _: return None - + return None @@ -706,7 +707,7 @@ def ingress_func_metadata( blip = False fmap_rp_list = [] fmap_TE_list = [] - + if "fmap" in sub_dict: second = False for orig_key in sub_dict["fmap"]: @@ -834,7 +835,9 @@ def ingress_func_metadata( # Set flags based on predictable patterns if re.match("epi_[AP]{2}", orig_key): blip = True - elif any(pattern in key.lower() for pattern in ["phase", "phasediff", "fieldmap"]): + elif any( + pattern in key.lower() for pattern in ["phase", "phasediff", "fieldmap"] + ): diff = True # Conservative approach: if we have any fieldmaps, prepare for diff processing @@ -877,13 +880,13 @@ def ingress_func_metadata( ) wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") - + # Connect EffectiveEchoSpacing from functional metadata node, out_file = rpool.get("effectiveEchoSpacing")[ "['effectiveEchoSpacing:func_metadata_ingress']" ]["data"] wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") - + rpool.set_data( "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" ) diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 0769204706..ff2d499042 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -28,7 +28,11 @@ from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node from CPAC.utils.test_resources import setup_test_wf from CPAC.utils.utils import PE_DIRECTION -from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node, get_fmap_type +from CPAC.utils.datasource import ( + match_epi_fmaps, + match_epi_fmaps_function_node, + get_fmap_type, +) @dataclass @@ -390,11 +394,9 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: # Case 1: Phase-difference map (phasediff) - REQUIRED: EchoTime1 and EchoTime2 ({"EchoTime1": 0.00600, "EchoTime2": 0.00746}, "phasediff"), ({"EchoTime1": 0.004, "EchoTime2": 0.006}, "phasediff"), - # Case 2: Single phase map (phase) - REQUIRED: EchoTime ({"EchoTime": 0.00746}, "phase"), ({"EchoTime": 0.004}, "phase"), - # Case 3: Direct field mapping (fieldmap) - REQUIRED: Units ({"Units": "rad/s"}, "fieldmap"), ({"Units": "Hz"}, "fieldmap"), @@ -402,7 +404,6 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: ({"Units": "T"}, "fieldmap"), ({"Units": "Tesla"}, "fieldmap"), ({"Units": "hertz"}, "fieldmap"), - # Case 4: EPI field maps (epi) - REQUIRED: PhaseEncodingDirection ({"PhaseEncodingDirection": "j-"}, "epi"), ({"PhaseEncodingDirection": "j"}, "epi"), @@ -410,7 +411,6 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: 
({"PhaseEncodingDirection": "i-"}, "epi"), ({"PhaseEncodingDirection": "k"}, "epi"), ({"PhaseEncodingDirection": "k-"}, "epi"), - # Edge cases and invalid inputs ({}, None), # Empty metadata ({"SomeOtherField": "value"}, None), # Irrelevant metadata @@ -418,24 +418,36 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None: ({"PhaseEncodingDirection": "invalid"}, None), # Invalid PE direction ({"EchoTime1": 0.006}, None), # Only EchoTime1 without EchoTime2 ({"EchoTime2": 0.006}, None), # Only EchoTime2 without EchoTime1 - # Priority testing - phasediff should take precedence ({"EchoTime1": 0.006, "EchoTime2": 0.007, "EchoTime": 0.006}, "phasediff"), ({"EchoTime1": 0.006, "EchoTime2": 0.007, "Units": "Hz"}, "phasediff"), - ({"EchoTime1": 0.006, "EchoTime2": 0.007, "PhaseEncodingDirection": "j-"}, "phasediff"), - + ( + {"EchoTime1": 0.006, "EchoTime2": 0.007, "PhaseEncodingDirection": "j-"}, + "phasediff", + ), # Phase should take precedence over fieldmap and epi ({"EchoTime": 0.006, "Units": "Hz"}, "phase"), ({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}, "phase"), - # Fieldmap should take precedence over epi ({"Units": "Hz", "PhaseEncodingDirection": "j-"}, "fieldmap"), - # Test with optional fields that might be present (but shouldn't affect detection) - ({"EchoTime1": 0.006, "EchoTime2": 0.007, "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz"}, "phasediff"), - ({"Units": "rad/s", "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz"}, "fieldmap"), + ( + { + "EchoTime1": 0.006, + "EchoTime2": 0.007, + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", + }, + "phasediff", + ), + ( + { + "Units": "rad/s", + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", + }, + "fieldmap", + ), ({"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.095}, "epi"), - ] + ], ) def test_get_fmap_type_dict_input(metadata: dict, expected_type: str | None) -> None: """Test `get_fmap_type` with dictionary input using only required BIDS fields.""" @@ -450,29 +462,27 @@ def test_get_fmap_type_real_world_examples() -> None: "EchoTime1": 0.00600, "EchoTime2": 0.00746, # Optional fields that might be present: - "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"] + "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"], } assert get_fmap_type(phasediff_metadata) == "phasediff" - + # Real-world fieldmap example (only required fields) fieldmap_metadata = { "Units": "rad/s", # Optional fields that might be present: - "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz" + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", } assert get_fmap_type(fieldmap_metadata) == "fieldmap" - + # Real-world EPI example (only required fields) epi_metadata = { "PhaseEncodingDirection": "j-", # Optional fields that might be present: "TotalReadoutTime": 0.095, - "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz" + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", } assert get_fmap_type(epi_metadata) == "epi" - + # Real-world phase example (only required fields) - phase_metadata = { - "EchoTime": 0.00746 - } - assert get_fmap_type(phase_metadata) == "phase" \ No newline at end of file + phase_metadata = {"EchoTime": 0.00746} + assert get_fmap_type(phase_metadata) == "phase" From e04eeb3bc7f0348f3adede5ce8b9be2db9b42cb6 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 1 Aug 2025 19:16:51 -0400 Subject: [PATCH 442/507] resolving the nipype not defined 
error --- CPAC/utils/datasource.py | 123 +++++++++++++++++++-------------------- 1 file changed, 61 insertions(+), 62 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index baa158dad1..e4b863e510 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -377,6 +377,67 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): def get_fmap_phasediff_metadata(data_config_scan_params): """Return the scan parameters for a field map phasediff scan.""" + def get_fmap_type(metadata): + """Determine the type of field map from metadata. + + reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image + + Parameters + ---------- + metadata : dict or str + Metadata dictionary or path to a JSON file containing metadata. + + Returns + ------- + str or None + Returns the type of field map as a string: + - "phasediff" for phase difference maps with two echo times + - "phase" for single echo phase maps + - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla + - "epi" for EPI field maps with phase encoding direction + """ + + if not isinstance(metadata, dict): + if isinstance(metadata, str) and ".json" in metadata: + import json + + try: + with open(metadata, "r", encoding="utf-8") as f: + metadata = json.load(f) + except (FileNotFoundError, json.JSONDecodeError): + return None + else: + return None + + # Check for required BIDS fields only + match ( + "EchoTime1" in metadata, + "EchoTime2" in metadata, + "EchoTime" in metadata, + "Units" in metadata, + "PhaseEncodingDirection" in metadata, + ): + case (True, True, _, _, _): + # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) + return "phasediff" + case (False, False, True, _, _): + # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2) + return "phase" + case (_, _, _, True, _): + # Case 3: Direct field mapping (REQUIRED: Units) + units = metadata["Units"].lower() + if units in ["hz", "rad/s", "t", "tesla", "hertz"]: + return "fieldmap" + case (_, _, _, _, True): + # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) + pe_dir = metadata["PhaseEncodingDirection"] + if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]: + return "epi" + case _: + return None + + return None + if ( not isinstance(data_config_scan_params, dict) and ".json" in data_config_scan_params @@ -549,68 +610,6 @@ def match_epi_fmaps_function_node(name: str = "match_epi_fmaps"): ) -def get_fmap_type(metadata): - """Determine the type of field map from metadata. - - reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image - - Parameters - ---------- - metadata : dict or str - Metadata dictionary or path to a JSON file containing metadata. 
- - Returns - ------- - str or None - Returns the type of field map as a string: - - "phasediff" for phase difference maps with two echo times - - "phase" for single echo phase maps - - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla - - "epi" for EPI field maps with phase encoding direction - """ - - if not isinstance(metadata, dict): - if isinstance(metadata, str) and ".json" in metadata: - import json - - try: - with open(metadata, "r", encoding="utf-8") as f: - metadata = json.load(f) - except (FileNotFoundError, json.JSONDecodeError): - return None - else: - return None - - # Check for required BIDS fields only - match ( - "EchoTime1" in metadata, - "EchoTime2" in metadata, - "EchoTime" in metadata, - "Units" in metadata, - "PhaseEncodingDirection" in metadata, - ): - case (True, True, _, _, _): - # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) - return "phasediff" - case (False, False, True, _, _): - # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2) - return "phase" - case (_, _, _, True, _): - # Case 3: Direct field mapping (REQUIRED: Units) - units = metadata["Units"].lower() - if units in ["hz", "rad/s", "t", "tesla", "hertz"]: - return "fieldmap" - case (_, _, _, _, True): - # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) - pe_dir = metadata["PhaseEncodingDirection"] - if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]: - return "epi" - case _: - return None - - return None - - def ingress_func_metadata( wf, cfg, From aa73f3a0a17e7578887ce24aac22cbe2e3d054db Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Sat, 2 Aug 2025 20:24:58 -0400 Subject: [PATCH 443/507] moving get_fmap_type to utils --- CPAC/utils/datasource.py | 62 ++-------------------------------------- CPAC/utils/utils.py | 62 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+), 60 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index e4b863e510..35e3a012e3 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -377,67 +377,9 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): def get_fmap_phasediff_metadata(data_config_scan_params): """Return the scan parameters for a field map phasediff scan.""" - def get_fmap_type(metadata): - """Determine the type of field map from metadata. - - reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image - - Parameters - ---------- - metadata : dict or str - Metadata dictionary or path to a JSON file containing metadata. 
- - Returns - ------- - str or None - Returns the type of field map as a string: - - "phasediff" for phase difference maps with two echo times - - "phase" for single echo phase maps - - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla - - "epi" for EPI field maps with phase encoding direction - """ - - if not isinstance(metadata, dict): - if isinstance(metadata, str) and ".json" in metadata: - import json - - try: - with open(metadata, "r", encoding="utf-8") as f: - metadata = json.load(f) - except (FileNotFoundError, json.JSONDecodeError): - return None - else: - return None - - # Check for required BIDS fields only - match ( - "EchoTime1" in metadata, - "EchoTime2" in metadata, - "EchoTime" in metadata, - "Units" in metadata, - "PhaseEncodingDirection" in metadata, - ): - case (True, True, _, _, _): - # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) - return "phasediff" - case (False, False, True, _, _): - # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2) - return "phase" - case (_, _, _, True, _): - # Case 3: Direct field mapping (REQUIRED: Units) - units = metadata["Units"].lower() - if units in ["hz", "rad/s", "t", "tesla", "hertz"]: - return "fieldmap" - case (_, _, _, _, True): - # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) - pe_dir = metadata["PhaseEncodingDirection"] - if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]: - return "epi" - case _: - return None - - return None + from CPAC.utils.utils import get_fmap_type + if ( not isinstance(data_config_scan_params, dict) and ".json" in data_config_scan_params diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index d86f0b2f2f..7604ebf2c4 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2657,3 +2657,65 @@ def flip_orientation_code(code): """Reverts an orientation code by flipping R↔L, A↔P, and I↔S.""" flip_dict = {"R": "L", "L": "R", "A": "P", "P": "A", "I": "S", "S": "I"} return "".join(flip_dict[c] for c in code) + + +def get_fmap_type(metadata): + """Determine the type of field map from metadata. + + reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image + + Parameters + ---------- + metadata : dict or str + Metadata dictionary or path to a JSON file containing metadata. 
+ + Returns + ------- + str or None + Returns the type of field map as a string: + - "phasediff" for phase difference maps with two echo times + - "phase" for single echo phase maps + - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla + - "epi" for EPI field maps with phase encoding direction + """ + + if not isinstance(metadata, dict): + if isinstance(metadata, str) and ".json" in metadata: + import json + + try: + with open(metadata, "r", encoding="utf-8") as f: + metadata = json.load(f) + except (FileNotFoundError, json.JSONDecodeError): + return None + else: + return None + + # Check for required BIDS fields only + match ( + "EchoTime1" in metadata, + "EchoTime2" in metadata, + "EchoTime" in metadata, + "Units" in metadata, + "PhaseEncodingDirection" in metadata, + ): + case (True, True, _, _, _): + # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2) + return "phasediff" + case (False, False, True, _, _): + # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2) + return "phase" + case (_, _, _, True, _): + # Case 3: Direct field mapping (REQUIRED: Units) + units = metadata["Units"].lower() + if units in ["hz", "rad/s", "t", "tesla", "hertz"]: + return "fieldmap" + case (_, _, _, _, True): + # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) + pe_dir = metadata["PhaseEncodingDirection"] + if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]: + return "epi" + case _: + return None + + return None \ No newline at end of file From 5a098728f2b63c473a1079d1ed5931fb9ea5b925 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Sat, 2 Aug 2025 20:29:38 -0400 Subject: [PATCH 444/507] correcting the imports in test --- CPAC/utils/tests/test_datasource.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index ff2d499042..1d223b0c01 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -25,13 +25,11 @@ import pytest from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.utils.datasource import match_epi_fmaps, match_epi_fmaps_function_node from CPAC.utils.test_resources import setup_test_wf -from CPAC.utils.utils import PE_DIRECTION +from CPAC.utils.utils import PE_DIRECTION, get_fmap_type from CPAC.utils.datasource import ( match_epi_fmaps, match_epi_fmaps_function_node, - get_fmap_type, ) From 1a4500ee5ed53c44e37b8ae266ea33342c2f75f1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 3 Aug 2025 00:30:56 +0000 Subject: [PATCH 445/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/utils/datasource.py | 2 +- CPAC/utils/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 35e3a012e3..0879e6a02f 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -379,7 +379,7 @@ def get_fmap_phasediff_metadata(data_config_scan_params): """Return the scan parameters for a field map phasediff scan.""" from CPAC.utils.utils import get_fmap_type - + if ( not isinstance(data_config_scan_params, dict) and ".json" in data_config_scan_params diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 7604ebf2c4..2daf44c06b 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2718,4 +2718,4 @@ def get_fmap_type(metadata): case _: return None - return None \ No newline at 
end of file + return None From 7ae1c2af7ea0d40de3f7d143f907d5b8cff35ab9 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 4 Aug 2025 12:12:59 -0400 Subject: [PATCH 446/507] check fmap metadata during workflow build for robust fmap type detection --- CPAC/utils/datasource.py | 115 +++++++++++++------------- CPAC/utils/tests/slurm-33950418.out | 0 CPAC/utils/utils.py | 120 +++++++++++++++++++++++++++- 3 files changed, 179 insertions(+), 56 deletions(-) delete mode 100644 CPAC/utils/tests/slurm-33950418.out diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 0879e6a02f..97f2ae02fe 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -377,7 +377,6 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): def get_fmap_phasediff_metadata(data_config_scan_params): """Return the scan parameters for a field map phasediff scan.""" - from CPAC.utils.utils import get_fmap_type if ( @@ -563,6 +562,8 @@ def ingress_func_metadata( num_strat=None, ): """Ingress metadata for functional scans.""" + from CPAC.utils.utils import get_fmap_build_info, get_fmap_metadata_at_build_time + name_suffix = "" for suffix_part in (unique_id, num_strat): if suffix_part is not None: @@ -624,7 +625,6 @@ def ingress_func_metadata( ] wf.connect(node, out, scan_params, "data_config_scan_params") - # Set functional metadata in rpool rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress") rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress") rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress") @@ -652,6 +652,14 @@ def ingress_func_metadata( if "fmap" in sub_dict: second = False for orig_key in sub_dict["fmap"]: + fmap_metadata = get_fmap_metadata_at_build_time( + sub_dict, + orig_key, + input_creds_path, + cfg.pipeline_setup["working_directory"]["path"], + ) + build_info = get_fmap_build_info(fmap_metadata) + gather_fmap = create_fmap_datasource( sub_dict["fmap"], f"fmap_gather_{orig_key}_{subject_id}" ) @@ -681,6 +689,10 @@ def ingress_func_metadata( fmap_rp_list.append(key) + get_fmap_metadata_imports = [ + "import json", + "from CPAC.utils.utils import get_fmap_type", + ] get_fmap_metadata = pe.Node( Function( input_names=["data_config_scan_params"], @@ -695,7 +707,7 @@ def ingress_func_metadata( "fmap_type", ], function=get_fmap_phasediff_metadata, - imports=["import json"], + imports=get_fmap_metadata_imports, ), name=f"{key}_get_metadata{name_suffix}", ) @@ -707,7 +719,6 @@ def ingress_func_metadata( "data_config_scan_params", ) - # Store the fmap type output for later use rpool.set_data( f"{key}-fmap-type", get_fmap_metadata, @@ -717,36 +728,42 @@ def ingress_func_metadata( "fmap_type_ingress", ) - # Set echo time data - let downstream processing filter based on type - rpool.set_data( - f"{key}-TE", - get_fmap_metadata, - "echo_time", - {}, - "", - "fmap_TE_ingress", - ) - fmap_TE_list.append(f"{key}-TE") + if build_info["needs_echo_times"]: + rpool.set_data( + f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) + fmap_TE_list.append(f"{key}-TE") + + rpool.set_data( + f"{key}-TE1", + get_fmap_metadata, + "echo_time_one", + {}, + "", + "fmap_TE1_ingress", + ) + fmap_TE_list.append(f"{key}-TE1") + + rpool.set_data( + f"{key}-TE2", + get_fmap_metadata, + "echo_time_two", + {}, + "", + "fmap_TE2_ingress", + ) + fmap_TE_list.append(f"{key}-TE2") - rpool.set_data( - f"{key}-TE1", - get_fmap_metadata, - "echo_time_one", - {}, - "", - 
"fmap_TE1_ingress", - ) - fmap_TE_list.append(f"{key}-TE1") + if build_info["needs_phasediff_processing"]: + diff = True - rpool.set_data( - f"{key}-TE2", - get_fmap_metadata, - "echo_time_two", - {}, - "", - "fmap_TE2_ingress", - ) - fmap_TE_list.append(f"{key}-TE2") + if build_info["is_epi"] or re.match("epi_[AP]{2}", orig_key): + blip = True rpool.set_data( f"{key}-dwell", @@ -773,19 +790,8 @@ def ingress_func_metadata( "fmap_readout_ingress", ) - # Set flags based on predictable patterns - if re.match("epi_[AP]{2}", orig_key): - blip = True - elif any( - pattern in key.lower() for pattern in ["phase", "phasediff", "fieldmap"] - ): - diff = True - - # Conservative approach: if we have any fieldmaps, prepare for diff processing - if fmap_rp_list: - diff = True - - if diff: + # Set up phasediff processing workflow if needed + if diff and fmap_TE_list: gather_echoes = pe.Node( Function( input_names=[ @@ -800,15 +806,15 @@ def ingress_func_metadata( name="fugue_gather_echo_times", ) - for idx, fmap_file in enumerate(fmap_TE_list, start=1): - if idx <= 4: # Limit to 4 inputs - try: - node, out_file = rpool.get(fmap_file)[ - f"['{fmap_file}:fmap_TE_ingress']" - ]["data"] - wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") - except KeyError: - pass + # Connect available echo times + for idx, fmap_file in enumerate(fmap_TE_list[:4], start=1): + try: + node, out_file = rpool.get(fmap_file)[ + f"['{fmap_file}:fmap_TE_ingress']" + ]["data"] + wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") + except KeyError: + pass calc_delta_ratio = pe.Node( Function( @@ -822,7 +828,6 @@ def ingress_func_metadata( wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") - # Connect EffectiveEchoSpacing from functional metadata node, out_file = rpool.get("effectiveEchoSpacing")[ "['effectiveEchoSpacing:func_metadata_ingress']" ]["data"] diff --git a/CPAC/utils/tests/slurm-33950418.out b/CPAC/utils/tests/slurm-33950418.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 2daf44c06b..a838780883 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2678,7 +2678,6 @@ def get_fmap_type(metadata): - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla - "epi" for EPI field maps with phase encoding direction """ - if not isinstance(metadata, dict): if isinstance(metadata, str) and ".json" in metadata: import json @@ -2719,3 +2718,122 @@ def get_fmap_type(metadata): return None return None + + +def get_fmap_metadata_at_build_time(sub_dict, orig_key, input_creds_path, dl_dir): + """Extract fieldmap metadata during workflow build time. 
+ + Parameters + ---------- + sub_dict : dict + Subject dictionary containing fieldmap information + orig_key : str + Original fieldmap key name + input_creds_path : str + Path to AWS credentials + dl_dir : str + Download directory path + + Returns + ------- + dict + Dictionary containing fieldmap metadata, or None if unavailable + """ + import json + import os + + try: + # Check if scan_parameters exists for this fieldmap + if orig_key not in sub_dict["fmap"]: + return None + + if "scan_parameters" not in sub_dict["fmap"][orig_key]: + return None + + scan_params_path = sub_dict["fmap"][orig_key]["scan_parameters"] + + # Handle dictionary metadata (direct dict) + if isinstance(scan_params_path, dict): + return scan_params_path + + # Handle file path metadata + if isinstance(scan_params_path, str): + local_path = scan_params_path + + # Handle S3 paths + if scan_params_path.startswith("s3://"): + try: + local_path = check_for_s3( + scan_params_path, input_creds_path, dl_dir + ) + except Exception: + return None + + # Load JSON file + if local_path.endswith(".json") and os.path.exists(local_path): + with open(local_path, "r", encoding="utf-8") as f: + return json.load(f) + + except (FileNotFoundError, json.JSONDecodeError, KeyError, Exception): + pass + + return None + + +def get_fmap_build_info(metadata_dict): + """Determine fieldmap processing requirements at build time. + + Parameters + ---------- + metadata_dict : dict or None + Fieldmap metadata dictionary + + Returns + ------- + dict + Dictionary with keys: + - 'fmap_type': str or None + - 'needs_echo_times': bool + - 'needs_phasediff_processing': bool + - 'is_epi': bool + """ + from CPAC.utils.utils import get_fmap_type + + build_info = { + "fmap_type": None, + "needs_echo_times": False, + "needs_phasediff_processing": False, + "is_epi": False, + } + + if not metadata_dict: + # Conservative fallback - assume we might need processing + build_info["needs_echo_times"] = True + build_info["needs_phasediff_processing"] = True + return build_info + + fmap_type = get_fmap_type(metadata_dict) + build_info["fmap_type"] = fmap_type + + match fmap_type: + case "phase": + build_info["needs_echo_times"] = True + build_info["needs_phasediff_processing"] = True + + case "phasediff": + build_info["needs_echo_times"] = True + build_info["needs_phasediff_processing"] = True + + case "epi": + build_info["needs_echo_times"] = True + build_info["is_epi"] = True + + case "fieldmap": + build_info["needs_phasediff_processing"] = True + + case None: + # Conservative fallback + build_info["needs_echo_times"] = True + build_info["needs_phasediff_processing"] = True + + return build_info From 7979f05075a79b178a220035e5db7582df0a3aab Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 4 Aug 2025 12:35:06 -0400 Subject: [PATCH 447/507] adding tests --- CPAC/utils/tests/test_datasource.py | 199 +++++++++++++++++++++++++++- CPAC/utils/utils.py | 39 ++++-- 2 files changed, 222 insertions(+), 16 deletions(-) diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 1d223b0c01..ae2a5eccb3 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -20,17 +20,23 @@ import json from pathlib import Path from typing import Any, Literal, TypeAlias +from unittest.mock import mock_open, patch from networkx.classes.digraph import DiGraph import pytest from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.utils.test_resources import setup_test_wf -from CPAC.utils.utils 
import PE_DIRECTION, get_fmap_type from CPAC.utils.datasource import ( match_epi_fmaps, match_epi_fmaps_function_node, ) +from CPAC.utils.test_resources import setup_test_wf +from CPAC.utils.utils import ( + get_fmap_build_info, + get_fmap_metadata_at_build_time, + get_fmap_type, + PE_DIRECTION, +) @dataclass @@ -484,3 +490,192 @@ def test_get_fmap_type_real_world_examples() -> None: # Real-world phase example (only required fields) phase_metadata = {"EchoTime": 0.00746} assert get_fmap_type(phase_metadata) == "phase" + + +class TestGetFmapMetadataAtBuildTime: + """Test get_fmap_metadata_at_build_time function.""" + + def test_missing_fmap_key(self): + """Test when fieldmap key doesn't exist in sub_dict.""" + sub_dict = {"fmap": {"other_key": {}}} + result = get_fmap_metadata_at_build_time(sub_dict, "missing_key", "", "") + assert result is None + + def test_missing_scan_parameters(self): + """Test when scan_parameters field is missing.""" + sub_dict = {"fmap": {"test_key": {"scan": "path/to/scan.nii.gz"}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result is None + + def test_direct_dict_metadata(self): + """Test when metadata is provided as a direct dictionary.""" + metadata = {"EchoTime1": 0.006, "EchoTime2": 0.007} + sub_dict = {"fmap": {"test_key": {"scan_parameters": metadata}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result == metadata + + @patch("builtins.open", new_callable=mock_open, read_data='{"EchoTime": 0.006}') + @patch("os.path.exists", return_value=True) + def test_json_file_metadata(self, mock_exists, mock_file): + """Test loading metadata from JSON file.""" + sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/metadata.json"}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result == {"EchoTime": 0.006} + mock_file.assert_called_once_with( + "/path/to/metadata.json", "r", encoding="utf-8" + ) + + @patch("os.path.exists", return_value=False) + def test_nonexistent_file(self, mock_exists): + """Test when JSON file doesn't exist.""" + sub_dict = {"fmap": {"test_key": {"scan_parameters": "/nonexistent/file.json"}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result is None + + @patch("builtins.open", side_effect=json.JSONDecodeError("Invalid JSON", "", 0)) + @patch("os.path.exists", return_value=True) + def test_invalid_json(self, mock_exists, mock_file): + """Test when JSON file contains invalid JSON.""" + sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/invalid.json"}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result is None + + def test_non_json_file(self): + """Test when file path doesn't end with .json.""" + sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/file.txt"}}} + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result is None + + def test_exception_handling(self): + """Test general exception handling.""" + sub_dict = {"fmap": {"test_key": {"scan_parameters": 123}}} # Invalid type + result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "") + assert result is None + + +class TestGetFmapBuildInfo: + """Test get_fmap_build_info function.""" + + def test_none_metadata_raises_error(self): + """Test that None metadata raises ValueError.""" + with pytest.raises( + ValueError, match="Fieldmap metadata dictionary is required" + ): + get_fmap_build_info(None) + + def 
test_empty_metadata_raises_error(self): + """Test that empty metadata raises ValueError.""" + with pytest.raises( + ValueError, match="Fieldmap metadata dictionary is required" + ): + get_fmap_build_info({}) + + def test_unknown_fmap_type_raises_error(self): + """Test that unknown fieldmap type raises ValueError.""" + metadata = {"SomeUnknownField": "value"} + with pytest.raises(ValueError, match="Could not determine fieldmap type"): + get_fmap_build_info(metadata) + + def test_phase_fieldmap_info(self): + """Test phase fieldmap build info.""" + metadata = {"EchoTime": 0.006} + result = get_fmap_build_info(metadata) + expected = { + "fmap_type": "phase", + "needs_echo_times": True, + "needs_phasediff_processing": True, + "is_epi": False, + } + assert result == expected + + def test_phasediff_fieldmap_info(self): + """Test phasediff fieldmap build info.""" + metadata = {"EchoTime1": 0.006, "EchoTime2": 0.007} + result = get_fmap_build_info(metadata) + expected = { + "fmap_type": "phasediff", + "needs_echo_times": True, + "needs_phasediff_processing": True, + "is_epi": False, + } + assert result == expected + + def test_epi_fieldmap_info(self): + """Test EPI fieldmap build info.""" + metadata = {"PhaseEncodingDirection": "j-"} + result = get_fmap_build_info(metadata) + expected = { + "fmap_type": "epi", + "needs_echo_times": True, + "needs_phasediff_processing": False, + "is_epi": True, + } + assert result == expected + + @pytest.mark.parametrize( + "metadata,expected_fmap_type", + [ + ({"EchoTime": 0.006}, "phase"), + ({"EchoTime1": 0.006, "EchoTime2": 0.007}, "phasediff"), + ({"PhaseEncodingDirection": "j-"}, "epi"), + ], + ) + def test_various_fieldmap_types(self, metadata, expected_fmap_type): + """Test that various fieldmap types are correctly identified.""" + result = get_fmap_build_info(metadata) + assert result["fmap_type"] == expected_fmap_type + + def test_real_world_metadata_examples(self): + """Test with realistic metadata examples from the existing tests.""" + # Use some of the test data from the existing test_get_fmap_type tests + + # Phasediff example + phasediff_metadata = { + "EchoTime1": 0.00600, + "EchoTime2": 0.00746, + "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"], + } + result = get_fmap_build_info(phasediff_metadata) + assert result["fmap_type"] == "phasediff" + assert result["needs_echo_times"] is True + assert result["needs_phasediff_processing"] is True + assert result["is_epi"] is False + + # EPI example + epi_metadata = { + "PhaseEncodingDirection": "j-", + "TotalReadoutTime": 0.095, + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", + } + result = get_fmap_build_info(epi_metadata) + assert result["fmap_type"] == "epi" + assert result["needs_echo_times"] is True + assert result["needs_phasediff_processing"] is False + assert result["is_epi"] is True + + def test_phase_fieldmap_with_extra_fields(self): + """Test phase fieldmap with additional optional fields.""" + metadata = { + "EchoTime": 0.006, + "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz", + "B0FieldIdentifier": "my_fieldmap", + } + result = get_fmap_build_info(metadata) + assert result["fmap_type"] == "phase" + assert result["needs_echo_times"] is True + assert result["needs_phasediff_processing"] is True + assert result["is_epi"] is False + + def test_phasediff_fieldmap_with_extra_fields(self): + """Test phasediff fieldmap with additional optional fields.""" + metadata = { + "EchoTime1": 0.006, + "EchoTime2": 0.007, + "IntendedFor": 
["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"], + "B0FieldIdentifier": "my_phasediff", + } + result = get_fmap_build_info(metadata) + assert result["fmap_type"] == "phasediff" + assert result["needs_echo_times"] is True + assert result["needs_phasediff_processing"] is True + assert result["is_epi"] is False diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index a838780883..5d2213ec4a 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2796,25 +2796,35 @@ def get_fmap_build_info(metadata_dict): - 'needs_echo_times': bool - 'needs_phasediff_processing': bool - 'is_epi': bool + + Raises + ------ + ValueError + If metadata_dict is None or if fieldmap type cannot be determined """ from CPAC.utils.utils import get_fmap_type + if not metadata_dict: + raise ValueError( + "Fieldmap metadata dictionary is required but was None. " + "Cannot determine fieldmap processing requirements without metadata." + ) + + fmap_type = get_fmap_type(metadata_dict) + + if fmap_type is None: + raise ValueError( + f"Could not determine fieldmap type from metadata: {metadata_dict}. " + "Metadata must contain required BIDS fields for fieldmap type detection." + ) + build_info = { - "fmap_type": None, + "fmap_type": fmap_type, "needs_echo_times": False, "needs_phasediff_processing": False, "is_epi": False, } - if not metadata_dict: - # Conservative fallback - assume we might need processing - build_info["needs_echo_times"] = True - build_info["needs_phasediff_processing"] = True - return build_info - - fmap_type = get_fmap_type(metadata_dict) - build_info["fmap_type"] = fmap_type - match fmap_type: case "phase": build_info["needs_echo_times"] = True @@ -2831,9 +2841,10 @@ def get_fmap_build_info(metadata_dict): case "fieldmap": build_info["needs_phasediff_processing"] = True - case None: - # Conservative fallback - build_info["needs_echo_times"] = True - build_info["needs_phasediff_processing"] = True + case _: + raise ValueError( + f"Unsupported fieldmap type '{fmap_type}'. " + "Supported types are: 'phase', 'phasediff', 'epi', 'fieldmap'." 
+            )
 
     return build_info
 
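The hunk above replaces the old "conservative fallback" with a fail-fast contract: `get_fmap_build_info` now raises a `ValueError` instead of guessing at processing requirements. A minimal sketch of the caller-side behavior after this patch (illustrative only; the metadata values are invented):

    from CPAC.utils.utils import get_fmap_build_info

    try:
        get_fmap_build_info({})  # empty metadata no longer yields a fallback dict
    except ValueError as err:
        print(f"build fails early, as intended: {err}")

    # Complete phasediff metadata still returns the build-info dict.
    info = get_fmap_build_info({"EchoTime1": 0.006, "EchoTime2": 0.007})
    assert info["fmap_type"] == "phasediff"
    assert info["needs_phasediff_processing"] is True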
From 26af869e3ec512dc02ff0a2dea920d52389ffd17 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Mon, 4 Aug 2025 15:01:10 -0400
Subject: [PATCH 448/507] fixing the wrong classification

---
 CPAC/utils/tests/test_datasource.py | 75 ++++++++++++++++++-----------
 CPAC/utils/utils.py                 |  6 +--
 2 files changed, 50 insertions(+), 31 deletions(-)

diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py
index ae2a5eccb3..61ec5b655d 100644
--- a/CPAC/utils/tests/test_datasource.py
+++ b/CPAC/utils/tests/test_datasource.py
@@ -398,17 +398,10 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None:
         # Case 1: Phase-difference map (phasediff) - REQUIRED: EchoTime1 and EchoTime2
         ({"EchoTime1": 0.00600, "EchoTime2": 0.00746}, "phasediff"),
         ({"EchoTime1": 0.004, "EchoTime2": 0.006}, "phasediff"),
-        # Case 2: Single phase map (phase) - REQUIRED: EchoTime
+        # Case 2: Single phase map (phase) - REQUIRED: EchoTime, but NOT PhaseEncodingDirection
         ({"EchoTime": 0.00746}, "phase"),
         ({"EchoTime": 0.004}, "phase"),
-        # Case 3: Direct field mapping (fieldmap) - REQUIRED: Units
-        ({"Units": "rad/s"}, "fieldmap"),
-        ({"Units": "Hz"}, "fieldmap"),
-        ({"Units": "hz"}, "fieldmap"),
-        ({"Units": "T"}, "fieldmap"),
-        ({"Units": "Tesla"}, "fieldmap"),
-        ({"Units": "hertz"}, "fieldmap"),
-        # Case 4: EPI field maps (epi) - REQUIRED: PhaseEncodingDirection
+        # Case 3: EPI field maps (epi) - REQUIRED: PhaseEncodingDirection
         ({"PhaseEncodingDirection": "j-"}, "epi"),
         ({"PhaseEncodingDirection": "j"}, "epi"),
         ({"PhaseEncodingDirection": "i"}, "epi"),
@@ -417,23 +410,14 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None:
         ({"PhaseEncodingDirection": "k-"}, "epi"),
         # Edge cases and invalid inputs
         ({}, None),  # Empty metadata
-        ({"SomeOtherField": "value"}, None),  # Irrelevant metadata
-        ({"Units": "invalid_unit"}, None),  # Invalid units
-        ({"PhaseEncodingDirection": "invalid"}, None),  # Invalid PE direction
-        ({"EchoTime1": 0.006}, None),  # Only EchoTime1 without EchoTime2
-        ({"EchoTime2": 0.006}, None),  # Only EchoTime2 without EchoTime1
-        # Priority testing - phasediff should take precedence
+        # Priority testing - phasediff should take precedence over everything
         ({"EchoTime1": 0.006, "EchoTime2": 0.007, "EchoTime": 0.006}, "phasediff"),
-        ({"EchoTime1": 0.006, "EchoTime2": 0.007, "Units": "Hz"}, "phasediff"),
         (
             {"EchoTime1": 0.006, "EchoTime2": 0.007, "PhaseEncodingDirection": "j-"},
             "phasediff",
         ),
-        # Phase should take precedence over fieldmap and epi
-        ({"EchoTime": 0.006, "Units": "Hz"}, "phase"),
-        ({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}, "phase"),
-        # Fieldmap should take precedence over epi
-        ({"Units": "Hz", "PhaseEncodingDirection": "j-"}, "fieldmap"),
+        # EPI should take precedence when PhaseEncodingDirection is present (even with EchoTime)
+        ({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}, "epi"),
         # Test with optional fields that might be present (but shouldn't affect detection)
         (
             {
@@ -443,14 +427,51 @@ def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None:
             },
             "phasediff",
         ),
+        ({"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.095}, "epi"),
+        ({"EchoTime": 0.006, "TotalReadoutTime": 0.095}, "phase"),
+        # Test invalid PhaseEncodingDirection values (current implementation returns epi for any value)
+        (
+            {"PhaseEncodingDirection": "invalid"},
+            "epi",
+        ),  # Current implementation returns epi for any PE direction
+        (
+            {"PhaseEncodingDirection": "AP"},
+            "epi",
+        ),  # Current implementation returns epi for any PE direction
+        (
+            {"PhaseEncodingDirection": "PA"},
+            "epi",
+        ),  # Current implementation returns epi for any PE direction
+        (
+            {"PhaseEncodingDirection": ""},
+            "epi",
+        ),  # Current implementation returns epi for any PE direction
+        # Test fieldmap type (currently implemented and working)
+        ({"Units": "rad/s"}, "fieldmap"),
+        ({"Units": "Hz"}, "fieldmap"),
+        ({"Units": "hz"}, "fieldmap"),
+        ({"Units": "T"}, "fieldmap"),
+        ({"Units": "Tesla"}, "fieldmap"),
+        ({"Units": "hertz"}, "fieldmap"),
+        # Mixed cases with Units - fieldmap takes precedence in current implementation
+        (
+            {"Units": "Hz", "PhaseEncodingDirection": "j-"},
             "fieldmap",
-        ),
-        ({"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.095}, "epi"),
+        ),  # fieldmap takes precedence
+        (
+            {"EchoTime": 0.006, "Units": "Hz"},
+            "phase",
+        ),  # Phase takes precedence over fieldmap
+        # Test with bytes values (common in real data) - current implementation handles these
+        (
+            {"PhaseEncodingDirection": b"j-"},
+            "epi",
+        ),  # Current implementation returns epi for bytes
+        # Test case sensitivity - current implementation handles these
+        (
+            {"PhaseEncodingDirection": "J-"},
+            "epi",
+        ),  # Current implementation returns epi regardless of case
     ],
 )
 def test_get_fmap_type_dict_input(metadata: dict, expected_type: str | None) -> None:
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index 5d2213ec4a..2641510911 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -2701,7 +2701,7 @@ def get_fmap_type(metadata):
         case (True, True, _, _, _):
             # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2)
             return "phasediff"
-        case (False, False, True, _, _):
+        case (False, False, True, _, False):
             # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2)
             return "phase"
         case (_, _, _, True, _):
@@ -2711,9 +2711,7 @@
             return "fieldmap"
         case (_, _, _, _, True):
             # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection)
-            pe_dir = metadata["PhaseEncodingDirection"]
-            if pe_dir in ["i", "i-", "j", "j-", "k", "k-"]:
-                return "epi"
+            return "epi"
         case _:
             return None
 

From cf9229c51f6d41a5d6e163e802d08c527f950eaf Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Mon, 4 Aug 2025 17:13:03 -0400
Subject: [PATCH 449/507] adding to changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index cac9fc998c..058fd3d472 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -35,6 +35,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Functionality to convert `space-T1w_desc-loose_brain_mask` and `space-T1w_desc-tight_brain_mask` into generic brain mask `space-T1w_desc-brain_mask` to use in brain extraction nodeblock downstream.
 - `desc-ABCDpreproc_T1w` to the outputs
 - `bc` to `lite` container images.
+- Robust method to classify `fmap` types by reading the JSON metadata during the workflow build process.

### Changed From d595d85cdf1fd8efadf2a657daa42091aadaab33 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Tue, 5 Aug 2025 12:26:56 -0400 Subject: [PATCH 450/507] Update CPAC/utils/utils.py Co-authored-by: Jon Cluce --- CPAC/utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 2641510911..5a5e9e012e 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2662,7 +2662,7 @@ def flip_orientation_code(code): def get_fmap_type(metadata): """Determine the type of field map from metadata. - reference: https://bids-specification.readthedocs.io/en/latest/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image + reference: https://bids-specification.readthedocs.io/en/v1.10.0/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image Parameters ---------- From 841d474ba8c764ed6de90ac38c7145567ae5f0f6 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Tue, 5 Aug 2025 12:27:58 -0400 Subject: [PATCH 451/507] Update CPAC/utils/utils.py Co-authored-by: Jon Cluce --- CPAC/utils/utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 5a5e9e012e..833790ca64 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2811,10 +2811,9 @@ def get_fmap_build_info(metadata_dict): fmap_type = get_fmap_type(metadata_dict) if fmap_type is None: - raise ValueError( - f"Could not determine fieldmap type from metadata: {metadata_dict}. " - "Metadata must contain required BIDS fields for fieldmap type detection." - ) + msg = (f"Could not determine fieldmap type from metadata: {metadata_dict}. " + "Metadata must contain required BIDS fields for fieldmap type detection.") + raise ValueError(msg) build_info = { "fmap_type": fmap_type, From 5a789900e383ad0a4b3279b00c6169955ce6c73f Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Tue, 5 Aug 2025 12:28:08 -0400 Subject: [PATCH 452/507] Update CPAC/utils/utils.py Co-authored-by: Jon Cluce --- CPAC/utils/utils.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 833790ca64..00718fc06c 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2778,23 +2778,16 @@ def get_fmap_metadata_at_build_time(sub_dict, orig_key, input_creds_path, dl_dir return None -def get_fmap_build_info(metadata_dict): +@Function.sig_imports(["from typing import Optional", + "from CPAC.utils.utils import FmapBuildInfo"]) +def get_fmap_build_info(metadata_dict: Optional[dict]) -> FmapBuildInfo: """Determine fieldmap processing requirements at build time. 
Parameters ---------- - metadata_dict : dict or None + metadata_dict Fieldmap metadata dictionary - Returns - ------- - dict - Dictionary with keys: - - 'fmap_type': str or None - - 'needs_echo_times': bool - - 'needs_phasediff_processing': bool - - 'is_epi': bool - Raises ------ ValueError From ece294dcad5e8a03ec77246836d1ef248cce6b4a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:28:10 +0000 Subject: [PATCH 453/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/utils/utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 00718fc06c..64fe49969a 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2804,8 +2804,10 @@ def get_fmap_build_info(metadata_dict: Optional[dict]) -> FmapBuildInfo: fmap_type = get_fmap_type(metadata_dict) if fmap_type is None: - msg = (f"Could not determine fieldmap type from metadata: {metadata_dict}. " - "Metadata must contain required BIDS fields for fieldmap type detection.") + msg = ( + f"Could not determine fieldmap type from metadata: {metadata_dict}. " + "Metadata must contain required BIDS fields for fieldmap type detection." + ) raise ValueError(msg) build_info = { From 489e41c940993bd3b296f8e6b21c8d4530a9a6fb Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Tue, 5 Aug 2025 12:28:26 -0400 Subject: [PATCH 454/507] Update CPAC/utils/utils.py Co-authored-by: Jon Cluce --- CPAC/utils/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 64fe49969a..771c3c8ccd 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2709,6 +2709,7 @@ def get_fmap_type(metadata): units = metadata["Units"].lower() if units in ["hz", "rad/s", "t", "tesla", "hertz"]: return "fieldmap" + return None case (_, _, _, _, True): # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection) return "epi" From 3fd16bf7719a611dded8254df29ec200279ee731 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:28:37 +0000 Subject: [PATCH 455/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/utils/utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 771c3c8ccd..5a99bf6876 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2779,8 +2779,9 @@ def get_fmap_metadata_at_build_time(sub_dict, orig_key, input_creds_path, dl_dir return None -@Function.sig_imports(["from typing import Optional", - "from CPAC.utils.utils import FmapBuildInfo"]) +@Function.sig_imports( + ["from typing import Optional", "from CPAC.utils.utils import FmapBuildInfo"] +) def get_fmap_build_info(metadata_dict: Optional[dict]) -> FmapBuildInfo: """Determine fieldmap processing requirements at build time. 
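After patches 448 through 455, the classification contract of `get_fmap_type` is settled: phasediff always wins; phase requires EchoTime without a PhaseEncodingDirection; recognized Units mean a direct fieldmap; and any remaining PhaseEncodingDirection means epi. A minimal sketch of that decision order, restated as plain conditionals for illustration (the helper name is invented; the shipped code uses a structural `match` over five booleans):

    def fmap_type_sketch(metadata: dict):
        # Illustrative restatement of the priority in CPAC.utils.utils.get_fmap_type.
        if "EchoTime1" in metadata and "EchoTime2" in metadata:
            return "phasediff"  # Case 1 always wins
        if (
            "EchoTime" in metadata
            and "EchoTime1" not in metadata
            and "EchoTime2" not in metadata
            and "PhaseEncodingDirection" not in metadata
        ):
            return "phase"  # Case 2: EchoTime alone, without a PE direction
        if "Units" in metadata:
            # Case 3: recognized units mean a direct fieldmap; anything else is None
            units = str(metadata["Units"]).lower()
            return "fieldmap" if units in ("hz", "rad/s", "t", "tesla", "hertz") else None
        if "PhaseEncodingDirection" in metadata:
            return "epi"  # Case 4: any PE direction value is accepted
        return None

    assert fmap_type_sketch({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}) == "epi"
    assert fmap_type_sketch({"EchoTime": 0.006, "Units": "Hz"}) == "phase"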
From 7764fba1264ce9db38668e508cea9ef2479ade36 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Tue, 5 Aug 2025 12:55:04 -0400
Subject: [PATCH 456/507] adding type hinting as per Jon's suggestion

---
 CPAC/utils/utils.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index 5a99bf6876..c3f0209d5d 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -25,7 +25,7 @@
 import numbers
 import os
 import pickle
-from typing import Any, Literal, Optional, overload
+from typing import Any, Literal, Optional, overload, TypedDict
 
 import numpy as np
 from voluptuous.error import Invalid
@@ -2779,6 +2779,15 @@ def get_fmap_metadata_at_build_time(sub_dict, orig_key, input_creds_path, dl_dir
     return None
 
 
+class FmapBuildInfo(TypedDict):
+    """Build-time fieldmap processing requirements."""
+
+    fmap_type: Optional[str]
+    needs_echo_times: bool
+    needs_phasediff_processing: bool
+    is_epi: bool
+
+
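With `FmapBuildInfo` declared as a `TypedDict`, the shape of the returned dict becomes statically checkable. A small illustration (the consumer function below is hypothetical, not part of this patch series):

    from CPAC.utils.utils import FmapBuildInfo

    def wants_phasediff_prep(info: FmapBuildInfo) -> bool:
        # Under mypy or pyright, a misspelled key such as info["needs_phasediff"]
        # is flagged as a type error instead of surfacing as a runtime KeyError.
        return info["needs_phasediff_processing"] and not info["is_epi"]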
From 22df8c37cb052e582faa29ee285f3b7179680443 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 6 Aug 2025 13:48:30 -0400
Subject: [PATCH 457/507] :loud_sound: Add `new_strats` to verbose logger

---
 CPAC/pipeline/engine.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index 93e5cd7981..bebebd2049 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -849,6 +849,10 @@ def get_strats(self, resources, debug=False):
                 new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update(
                     copy.deepcopy(resource_strat_dct["json"])
                 )
+        if debug:
+            verbose_logger = getLogger("CPAC.engine")
+            _k = list(new_strats.keys())
+            verbose_logger.debug("new_strats: (%s) %s\n", len(_k), _k)
         return new_strats
 
     def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x):

From 44b0f4b1319020bc867ba627ff20b4e7f4a99a26 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Wed, 6 Aug 2025 21:17:51 -0400
Subject: [PATCH 458/507] :loud_sound: Include name when debugging strats

---
 CPAC/pipeline/engine.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index bebebd2049..c5b84614f8 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -597,7 +597,7 @@ def flatten_prov(self, prov):
             return flat_prov
         return None
 
-    def get_strats(self, resources, debug=False):
+    def get_strats(self, resources, debug: bool | str = False):
         # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS
         # TODO: (and it doesn't have to be)
 
@@ -852,7 +852,10 @@ def get_strats(self, resources, debug=False):
         if debug:
             verbose_logger = getLogger("CPAC.engine")
             _k = list(new_strats.keys())
-            verbose_logger.debug("new_strats: (%s) %s\n", len(_k), _k)
+            if isinstance(debug, str):
+                verbose_logger.debug("new_strats: (%s, %s) %s\n", debug, len(_k), _k)
+            else:
+                verbose_logger.debug("new_strats: (%s) %s\n", len(_k), _k)
         return new_strats
 
     def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x):
@@ -1712,7 +1712,7 @@ def connect_block(self, wf, cfg, rpool):
             for (
                 pipe_idx,
                 strat_pool,  # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.}
-            ) in rpool.get_strats(inputs, debug).items():
+            ) in rpool.get_strats(inputs, name if debug else False).items():
                 # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} }
                 fork = False in switch
                 for opt in opts:  # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys.

From 1d89c09f8e754570e7bde2c1ed99abee7c4233f8 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 7 Aug 2025 12:14:01 -0400
Subject: [PATCH 459/507] :recycle: Un-daisy-chain registration options

---
 CPAC/anat_preproc/anat_preproc.py            | 273 +++++--------------
 CPAC/anat_preproc/tests/test_anat_preproc.py |  13 +-
 2 files changed, 75 insertions(+), 211 deletions(-)

diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py
index f3ef8c1e10..458f11d414 100644
--- a/CPAC/anat_preproc/anat_preproc.py
+++ b/CPAC/anat_preproc/anat_preproc.py
@@ -28,7 +28,6 @@
     fsl_aff_to_rigid,
     fslmaths_command,
     mri_convert,
-    normalize_wmparc,
     pad,
     VolumeRemoveIslands,
     wb_command,
@@ -1322,22 +1321,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
 
     wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file")
 
-    if opt == "FreeSurfer-BET-Tight":
-        outputs = {
-            "space-T1w_desc-tight_brain_mask": (
-                binarize_combined_mask,
-                "out_file",
-            )
-        }
-    elif opt == "FreeSurfer-BET-Loose":
-        outputs = {
-            "space-T1w_desc-loose_brain_mask": (
-                binarize_combined_mask,
-                "out_file",
-            )
-        }
-
-    return (wf, outputs)
+    return wf, {"space-T1w_desc-brain_mask": (binarize_combined_mask, "out_file")}
 
 
 def mask_T2(wf_name="mask_T2"):
@@ -1500,7 +1484,7 @@ def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None):
         (
             "desc-head_T1w",
             "desc-preproc_T1w",
-            ["space-T1w_desc-brain_mask", "space-T1w_desc-brain_mask"],
+            "space-T1w_desc-brain_mask",
         ),
         "T1w-ACPC-template",
         "T1w-brain-ACPC-template",
@@ -1508,7 +1492,7 @@ def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=[
         "desc-head_T1w",
         "desc-preproc_T1w",
-        ["space-T1w_desc-brain_mask", "space-T1w_desc-brain_mask"],
+        "space-T1w_desc-brain_mask",
         "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm",
     ],
 )
@@ -1939,211 +1923,96 @@ def brain_mask_acpc_unet(wf, cfg, strat_pool, pipe_num, opt=None):
         ["anatomical_preproc", "run"],
     ],
     option_key=["anatomical_preproc", "brain_extraction", "using"],
option_val="FreeSurfer-ABCD", - inputs=[ - ["desc-restore_T1w", "desc-preproc_T1w"], - "pipeline-fs_wmparc", - "pipeline-fs_raw-average", - "freesurfer-subject-dir", - ], - outputs=["space-T1w_desc-brain_mask"], -) -def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - return (wf, outputs) - - -@nodeblock( - name="brain_mask_freesurfer_fsl_tight", - switch=[ - ["anatomical_preproc", "brain_extraction", "run"], - ["anatomical_preproc", "run"], - ], - option_key=["anatomical_preproc", "brain_extraction", "using"], - option_val="FreeSurfer-BET-Tight", - inputs=[ - "pipeline-fs_brainmask", - "pipeline-fs_T1", - "pipeline-fs_raw-average", - "freesurfer-subject-dir", + ( + ["desc-restore_T1w", "desc-preproc_T1w"], + "space-T1w_desc-brain_mask", + "pipeline-fs_T1", + "pipeline-fs_wmparc", + "pipeline-fs_raw-average", + "pipeline-fs_brainmask", + "freesurfer-subject-dir", + ), "T1w-brain-template-mask-ccs", "T1w-ACPC-template", ], - outputs={ - "space-T1w_desc-brain_mask": { - "Description": "Brain mask extracted using FreeSurfer-BET-Tight method", - "Method": "FreeSurfer-BET-Tight", - "Threshold": "tight", - } - }, -) -def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - # Convert the tight brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop( - "space-T1w_desc-tight_brain_mask" - ) - return (wf, outputs) - - -@nodeblock( - name="brain_mask_acpc_freesurfer_abcd", - switch=[ - ["anatomical_preproc", "brain_extraction", "run"], - ["anatomical_preproc", "run"], - ], - option_key=["anatomical_preproc", "brain_extraction", "using"], - option_val="FreeSurfer-ABCD", - inputs=[ - ["desc-restore_T1w", "desc-preproc_T1w"], - "pipeline-fs_wmparc", - "pipeline-fs_raw-average", - "freesurfer-subject-dir", - ], - outputs=["space-T1w_desc-acpcbrain_mask"], + outputs={"space-T1w_desc-brain_mask": {}}, ) -def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} - - return (wf, outputs) +def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): + assert isinstance(brain_mask_freesurfer.outputs, dict) + brain_mask_freesurfer.outputs["space-T1w_desc-brain_mask"] = { + "Description": f"Brain mask extracted using {opt} method", + "Method": opt, + } + match opt: + case "FreeSurfer-ABCD": + return freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) + case "FreeSurfer-BET-Loose" | "FreeSurfer-BET-Tight": + brain_mask_freesurfer.outputs["space-T1w_desc-brain_mask"]["Threshold"] = ( + opt.rsplit("-")[-1].lower() + ) + return freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) + case "FreeSurfer-Brainmask": + return freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt) + return wf, {} @nodeblock( - name="brain_mask_freesurfer_fsl_loose", + name="brain_mask_acpc_freesurfer", switch=[ ["anatomical_preproc", "brain_extraction", "run"], ["anatomical_preproc", "run"], ], option_key=["anatomical_preproc", "brain_extraction", "using"], - option_val="FreeSurfer-BET-Loose", - inputs=[ - "pipeline-fs_brainmask", - "pipeline-fs_T1", - "pipeline-fs_raw-average", - "freesurfer-subject-dir", - "T1w-brain-template-mask-ccs", - "T1w-ACPC-template", + 
option_val=[ + "FreeSurfer-ABCD", + "FreeSurfer-Brainmask", + "FreeSurfer-BET-Loose", + "FreeSurfer-BET-Tight", ], - outputs={ - "space-T1w_desc-brain_mask": { - "Description": "Brain mask extracted using FreeSurfer-BET-Loose method", - "Method": "FreeSurfer-BET-Loose", - "Threshold": "loose", - } - }, -) -def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - # Convert the loose brain mask to generic brain mask - outputs["space-T1w_desc-brain_mask"] = outputs.pop( - "space-T1w_desc-loose_brain_mask" - ) - return (wf, outputs) - - -@nodeblock( - name="brain_mask_acpc_freesurfer_fsl_tight", - switch=[ - ["anatomical_preproc", "brain_extraction", "run"], - ["anatomical_preproc", "run"], - ], - option_key=["anatomical_preproc", "brain_extraction", "using"], - option_val="FreeSurfer-BET-Tight", inputs=[ - "pipeline-fs_brainmask", - "pipeline-fs_T1", - "T1w-brain-template-mask-ccs", + ( + ["desc-restore_T1w", "desc-preproc_T1w"], + "space-T1w_desc-brain_mask", + "space-T1w_desc-acpcbrain_mask", + "pipeline-fs_brainmask", + "pipeline-fs_raw-average", + "pipeline-fs_T1", + "pipeline-fs_wmparc", + "freesurfer-subject-dir", + ), "T1w-ACPC-template", - ], - outputs=["space-T1w_desc-tight_acpcbrain_mask"], -) -def brain_mask_acpc_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - outputs = { - "space-T1w_desc-tight_acpcbrain_mask": wf_outputs[ - "space-T1w_desc-tight_brain_mask" - ] - } - - return (wf, outputs) - - -@nodeblock( - name="brain_mask_acpc_freesurfer_fsl_loose", - switch=[ - ["anatomical_preproc", "brain_extraction", "run"], - ["anatomical_preproc", "run"], - ], - option_key=["anatomical_preproc", "brain_extraction", "using"], - option_val="FreeSurfer-BET-Loose", - inputs=[ - "pipeline-fs_brainmask", - "pipeline-fs_T1", "T1w-brain-template-mask-ccs", - "T1w-ACPC-template", ], - outputs=["space-T1w_desc-loose_acpcbrain_mask"], + outputs={"space-T1w_desc-acpcbrain_mask": {}}, ) -def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - - outputs = { - "space-T1w_desc-loose_acpcbrain_mask": wf_outputs[ - "space-T1w_desc-loose_brain_mask" - ] +def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): + assert isinstance(brain_mask_acpc_freesurfer.outputs, dict) + outputs = wf_outputs = {} + key = "space-T1w_desc-brain_mask" + functions = { + "FreeSurfer-ABCD": freesurfer_abcd_brain_connector, + "FreeSurfer-Brainmask": freesurfer_brain_connector, + "FreeSurfer-BET-Tight": freesurfer_fsl_brain_connector, } + if opt in ["FreeSurfer-BET-Loose", "FreeSurfer-BET-Tight"]: + brain_mask_acpc_freesurfer.outputs["space-T1w_desc-acpcbrain_mask"] = { + "Description": f"Brain mask extracted using {opt} method", + "Method": opt, + "Threshold": opt.rsplit("-")[-1].lower(), + } + if opt in functions: + wf, wf_outputs = functions[opt](wf, cfg, strat_pool, pipe_num, opt) + if key in wf_outputs: + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs[key]} - return (wf, outputs) + return wf, outputs @nodeblock( diff --git a/CPAC/anat_preproc/tests/test_anat_preproc.py b/CPAC/anat_preproc/tests/test_anat_preproc.py index 829a3acd77..588cb1bb2d 100755 --- a/CPAC/anat_preproc/tests/test_anat_preproc.py +++ b/CPAC/anat_preproc/tests/test_anat_preproc.py @@ -1,15 +1,12 @@ import os 
+from unittest.mock import Mock, patch from nose.tools import * import numpy as np import nibabel as nib from .. import anat_preproc -from unittest.mock import Mock, patch -from ..anat_preproc import ( - brain_mask_freesurfer_fsl_loose, - brain_mask_freesurfer_fsl_tight, -) +from ..anat_preproc import brain_mask_freesurfer class TestAnatPreproc: @@ -279,7 +276,6 @@ def test_anat_brain(self): @patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") def test_brain_mask_freesurfer_fsl_loose(mock_connector): """Test that brain_mask_freesurfer_fsl_loose correctly renames output key.""" - mock_wf = Mock() mock_cfg = Mock() mock_strat_pool = Mock() @@ -292,7 +288,7 @@ def test_brain_mask_freesurfer_fsl_loose(mock_connector): mock_connector.return_value = (mock_wf, mock_outputs) - result_wf, result_outputs = brain_mask_freesurfer_fsl_loose( + result_wf, result_outputs = brain_mask_freesurfer( mock_wf, mock_cfg, mock_strat_pool, pipe_num ) @@ -313,7 +309,6 @@ def test_brain_mask_freesurfer_fsl_loose(mock_connector): @patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") def test_brain_mask_freesurfer_fsl_tight(mock_connector): """Test that brain_mask_freesurfer_fsl_tight correctly renames output key.""" - mock_wf = Mock() mock_cfg = Mock() mock_strat_pool = Mock() @@ -326,7 +321,7 @@ def test_brain_mask_freesurfer_fsl_tight(mock_connector): mock_connector.return_value = (mock_wf, mock_outputs) - result_wf, result_outputs = brain_mask_freesurfer_fsl_tight( + result_wf, result_outputs = brain_mask_freesurfer( mock_wf, mock_cfg, mock_strat_pool, pipe_num ) From 1d89c09f8e754570e7bde2c1ed99abee7c4233f8 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 8 Aug 2025 17:26:07 -0400 Subject: [PATCH 460/507] =?UTF-8?q?:truck:=20`desc-ref=5Fbold`=20=E2=86=92?= =?UTF-8?q?=20`sbref`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CPAC/func_preproc/func_preproc.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index d098b6186b..d5b04d0e01 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1719,10 +1719,9 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): option_val="CCS_Anatomical_Refined", inputs=[ ["desc-motion_bold", "desc-preproc_bold", "bold"], - "desc-brain_T1w", - ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], + ("desc-brain_T1w", "sbref", ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]), ], - outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], + outputs=["space-bold_desc-brain_mask", "sbref"], ) def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): """Generate the BOLD mask by basing it off of the anatomical brain. 
@@ -1837,7 +1836,7 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { "space-bold_desc-brain_mask": (intersect_mask, "out_file"), - "desc-ref_bold": (example_func_brain, "out_file"), + "sbref": (example_func_brain, "out_file"), } return (wf, outputs) From 5d9dafcf0a0d7785b36d33c9e29835a94c76aac2 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 8 Aug 2025 17:49:14 -0400 Subject: [PATCH 461/507] :goal_net: Try to short-circuit crossed streams [don't cross the streams](https://tenor.com/baIhQ.gif) --- CPAC/anat_preproc/anat_preproc.py | 5 ++++ CPAC/pipeline/cpac_pipeline.py | 12 --------- CPAC/pipeline/engine.py | 10 +++++++ CPAC/pipeline/utils.py | 43 +++++++++++++++++++++++++++++++ CPAC/registration/registration.py | 8 ++++++ 5 files changed, 66 insertions(+), 12 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 458f11d414..482b11ac8e 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1993,6 +1993,11 @@ def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-T1w_desc-acpcbrain_mask": {}}, ) def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): + if opt != strat_pool.get_json("space-T1w_desc-brain_mask").get( + "CpacVariant", {} + ).get("space-T1w_mask", opt): + # https://tenor.com/baIhQ.gif + return wf, {} assert isinstance(brain_mask_acpc_freesurfer.outputs, dict) outputs = wf_outputs = {} key = "space-T1w_desc-brain_mask" diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index c2ffa96404..8ca5b1398c 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -52,18 +52,12 @@ brain_extraction_temp_T2, brain_mask_acpc_afni, brain_mask_acpc_freesurfer, - brain_mask_acpc_freesurfer_abcd, - brain_mask_acpc_freesurfer_fsl_loose, - brain_mask_acpc_freesurfer_fsl_tight, brain_mask_acpc_fsl, brain_mask_acpc_niworkflows_ants, brain_mask_acpc_T2, brain_mask_acpc_unet, brain_mask_afni, brain_mask_freesurfer, - brain_mask_freesurfer_abcd, - brain_mask_freesurfer_fsl_loose, - brain_mask_freesurfer_fsl_tight, brain_mask_fsl, brain_mask_niworkflows_ants, brain_mask_T2, @@ -933,10 +927,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): brain_mask_acpc_fsl, brain_mask_acpc_niworkflows_ants, brain_mask_acpc_unet, - brain_mask_acpc_freesurfer_abcd, brain_mask_acpc_freesurfer, - brain_mask_acpc_freesurfer_fsl_tight, - brain_mask_acpc_freesurfer_fsl_loose, ], acpc_align_brain_with_mask, brain_extraction_temp, @@ -986,10 +977,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): brain_mask_fsl, brain_mask_niworkflows_ants, brain_mask_unet, - brain_mask_freesurfer_abcd, brain_mask_freesurfer, - brain_mask_freesurfer_fsl_tight, - brain_mask_freesurfer_fsl_loose, ] ] pipeline_blocks += anat_brain_mask_blocks diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index c5b84614f8..8f6a1a8620 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -42,8 +42,10 @@ from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.pipeline.utils import ( + CrossedVariantsError, MOVEMENT_FILTER_KEYS, name_fork, + short_circuit_crossed_variants, source_set, ) from CPAC.registration.registration import transform_derivative @@ -703,6 +705,7 @@ def get_strats(self, resources, debug: bool | str = False): if debug: verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("len(strat_list_list): %s\n", 
len(strat_list_list)) + for strat_list in strat_list_list: json_dct = {} for strat in strat_list: @@ -1716,6 +1719,13 @@ def connect_block(self, wf, cfg, rpool): pipe_idx, strat_pool, # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} ) in rpool.get_strats(inputs, name if debug else False).items(): + try: + short_circuit_crossed_variants(strat_pool, inputs) + except CrossedVariantsError as e: + if cfg.pipeline_setup["Debugging"]["verbose"]: + verbose_logger = getLogger("CPAC.engine") + verbose_logger.debug(e) + continue # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } fork = False in switch for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 8d46fe1373..9a121c5554 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -25,11 +25,16 @@ from CPAC.utils.bids_utils import insert_entity if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool from CPAC.pipeline.nodeblock import POOL_RESOURCE_MAPPING MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs +class CrossedVariantsError(Exception): + """Exception raised when crossed variants are found in the inputs.""" + + def get_shell() -> str: """Return the path to default shell.""" shell: Optional[str] = subprocess.getoutput( @@ -240,3 +245,41 @@ def _update_resource_idx(resource_idx, out_dct, key, value): resource_idx = insert_entity(resource_idx, key, value) out_dct["filename"] = insert_entity(out_dct["filename"], key, value) return resource_idx, out_dct + + +def find_variants( + pool: "ResourcePool", keys: list | str | tuple +) -> dict[str, dict[str, set[str]]]: + """Find variants in the ResourcePool for the given keys.""" + outputs = {} + if isinstance(keys, str): + try: + return {keys: pool.get_json(keys)["CpacVariant"]} + except: + return {} + for key in keys: + outputs = {**outputs, **find_variants(pool, key)} + return outputs + + +def short_circuit_crossed_variants( + pool: "ResourcePool", inputs: list | str | tuple +) -> None: + """Short-circuit the strategy if crossed variants are found. + + .. 
image:: https://media1.tenor.com/m/S93jWPGv52gAAAAd/dont-cross-the-streams-egon.gif + :width: 48 + :alt: Don't cross the streams + """ + _variants = find_variants(pool, inputs) + variants = {} + for variant in _variants.values(): + for k, v in variant.items(): + if k not in variants: + variants[k] = set(v) + else: + variants[k] = {*variants[k], *v} + crossed_variants = {k: v for k, v in variants.items() if len(v) > 1} + if crossed_variants: + msg = f"Crossed variants found: {crossed_variants}" + raise CrossedVariantsError(msg) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index d001b00550..6b3fceb0d2 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3191,6 +3191,10 @@ def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): """Create single-band reference for coreg by selecting a functional volume.""" + from CPAC.pipeline.utils import short_circuit_crossed_variants + + short_circuit_crossed_variants(strat_pool, strat_pool.rpool.keys()) + get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( @@ -3329,6 +3333,10 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): """Coregister BOLD to T1w.""" + from CPAC.pipeline.utils import short_circuit_crossed_variants + + short_circuit_crossed_variants(strat_pool, strat_pool.rpool.keys()) + diff_complete = False if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( "fieldmap-mask" From 1b6bd2efbc4cd673663225851b17fcc1de21999c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 8 Aug 2025 21:49:16 -0400 Subject: [PATCH 462/507] :necktie: Move short-circuit --- CPAC/pipeline/engine.py | 25 ++++++++++++++----------- CPAC/registration/registration.py | 8 -------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 8f6a1a8620..4b47cf36b6 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -852,14 +852,24 @@ def get_strats(self, resources, debug: bool | str = False): new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update( copy.deepcopy(resource_strat_dct["json"]) ) + return_strats: dict[str, ResourcePool] = {} + for pipe_idx, strat_pool in new_strats.items(): + try: + short_circuit_crossed_variants(strat_pool, resources) + return_strats[pipe_idx] = strat_pool + except CrossedVariantsError: + if debug: + verbose_logger = getLogger("CPAC.engine") + verbose_logger.debug("Dropped crossed variants strat: %s", pipe_idx) + continue if debug: verbose_logger = getLogger("CPAC.engine") - _k = list(new_strats.keys()) + _k = list(return_strats.keys()) if isinstance(debug, str): - verbose_logger.debug("new_strats: (%s, %s) %s\n", debug, len(_k), _k) + verbose_logger.debug("return_strats: (%s, %s) %s\n", debug, len(_k), _k) else: - verbose_logger.debug("new_strats: (%s) %s\n", len(_k), _k) - return new_strats + verbose_logger.debug("return_strats: (%s) %s\n", len(_k), _k) + return return_strats def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): if label in self.xfm: @@ -1719,13 +1729,6 @@ def connect_block(self, wf, cfg, rpool): pipe_idx, strat_pool, # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} ) in rpool.get_strats(inputs, name if debug else False).items(): - try: 
- short_circuit_crossed_variants(strat_pool, inputs) - except CrossedVariantsError as e: - if cfg.pipeline_setup["Debugging"]["verbose"]: - verbose_logger = getLogger("CPAC.engine") - verbose_logger.debug(e) - continue # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } fork = False in switch for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 6b3fceb0d2..d001b00550 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3191,10 +3191,6 @@ def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): """Create single-band reference for coreg by selecting a functional volume.""" - from CPAC.pipeline.utils import short_circuit_crossed_variants - - short_circuit_crossed_variants(strat_pool, strat_pool.rpool.keys()) - get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( @@ -3333,10 +3329,6 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): """Coregister BOLD to T1w.""" - from CPAC.pipeline.utils import short_circuit_crossed_variants - - short_circuit_crossed_variants(strat_pool, strat_pool.rpool.keys()) - diff_complete = False if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( "fieldmap-mask" From 2bff2a54a5f3895afeac68df84ccb1f9400ddec4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 11 Aug 2025 14:54:15 -0400 Subject: [PATCH 463/507] :white_check_mark: Update `brain_mask_freesurfer_fsl` tests --- CPAC/anat_preproc/tests/test_anat_preproc.py | 146 +++++++++---------- 1 file changed, 72 insertions(+), 74 deletions(-) diff --git a/CPAC/anat_preproc/tests/test_anat_preproc.py b/CPAC/anat_preproc/tests/test_anat_preproc.py index 588cb1bb2d..41128ea6cd 100755 --- a/CPAC/anat_preproc/tests/test_anat_preproc.py +++ b/CPAC/anat_preproc/tests/test_anat_preproc.py @@ -1,31 +1,45 @@ +# Copyright (C) 2012-2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Tests for anatomical preprocessing.""" + import os -from unittest.mock import Mock, patch -from nose.tools import * import numpy as np +import pytest import nibabel as nib -from .. 
import anat_preproc -from ..anat_preproc import brain_mask_freesurfer +from CPAC.anat_preproc import anat_preproc +from CPAC.anat_preproc.anat_preproc import brain_mask_freesurfer +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.engine import ResourcePool +from CPAC.utils.configuration import Preconfiguration +from CPAC.utils.test_init import create_dummy_node +CFG = Preconfiguration("ccs-options") -class TestAnatPreproc: - def __init__(self): + +@pytest.mark.skip(reason="This test needs refactoring.") +class TestAnatPreproc: # noqa + def setup_method(self) -> None: """ Initialize and run the the anat_preproc workflow. Populate the node-name : node_output dictionary using the workflow object. This dictionary serves the outputs of each of the nodes in the workflow to all the tests that need them. - - Parameters - ---------- - self - - Returns - ------- - None - - """ self.preproc = anat_preproc.create_anat_preproc() self.input_anat = os.path.abspath("$FSLDIR/data/standard/MNI152_T1_2mm.nii.gz") @@ -273,67 +287,51 @@ def test_anat_brain(self): assert correlation[0, 1] >= 0.97 -@patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") -def test_brain_mask_freesurfer_fsl_loose(mock_connector): - """Test that brain_mask_freesurfer_fsl_loose correctly renames output key.""" - mock_wf = Mock() - mock_cfg = Mock() - mock_strat_pool = Mock() - pipe_num = 1 - - mock_outputs = { - "space-T1w_desc-loose_brain_mask": "brain_mask_data", - "other_output": "other_data", - } - - mock_connector.return_value = (mock_wf, mock_outputs) - - result_wf, result_outputs = brain_mask_freesurfer( - mock_wf, mock_cfg, mock_strat_pool, pipe_num - ) - - mock_connector.assert_called_once_with( - mock_wf, mock_cfg, mock_strat_pool, pipe_num, None - ) - - # Assert workflow returned unchanged - assert result_wf == mock_wf - - # Assert output key was renamed correctly - assert "space-T1w_desc-brain_mask" in result_outputs - assert "space-T1w_desc-loose_brain_mask" not in result_outputs - assert result_outputs["space-T1w_desc-brain_mask"] == "brain_mask_data" - assert result_outputs["other_output"] == "other_data" - +@pytest.mark.parametrize("opt", ["FreeSurfer-BET-Loose", "FreeSurfer-BET-Tight"]) +@pytest.mark.parametrize("t1w", ["desc-restore_T1w", "desc-preproc_T1w"]) +def test_brain_mask_freesurfer_fsl_real(opt: str, t1w: str): + """Test that brain_mask_freesurfer_fsl correctly generates output key using real code.""" + # Create minimal mocks for required workflow/config/strat_pool, but do not patch freesurfer_fsl_brain_connector + + CFG["subject_id"] = opt + + wf = pe.Workflow(name=opt) + pre_resources = [ + t1w, + "space-T1w_desc-brain_mask", + "pipeline-fs_T1", + "pipeline-fs_wmparc", + "pipeline-fs_raw-average", + "pipeline-fs_brainmask", + "freesurfer-subject-dir", + "T1w-brain-template-mask-ccs", + "T1w-ACPC-template", + ] + before_this_test = create_dummy_node("created_before_this_test", pre_resources) + rpool = ResourcePool(name=f"{opt}_{opt}", cfg=CFG) + for resource in pre_resources: + rpool.set_data( + resource, before_this_test, resource, {}, "", before_this_test.name + ) + rpool.gather_pipes(wf, CFG) + strat_pool = next(iter(rpool.get_strats(pre_resources).values())) -@patch("CPAC.anat_preproc.anat_preproc.freesurfer_fsl_brain_connector") -def test_brain_mask_freesurfer_fsl_tight(mock_connector): - """Test that brain_mask_freesurfer_fsl_tight correctly renames output key.""" - mock_wf = Mock() - mock_cfg = Mock() - mock_strat_pool = Mock() pipe_num = 1 - mock_outputs = 
{ - "space-T1w_desc-tight_brain_mask": "brain_mask_data", - "other_output": "other_data", - } - - mock_connector.return_value = (mock_wf, mock_outputs) - result_wf, result_outputs = brain_mask_freesurfer( - mock_wf, mock_cfg, mock_strat_pool, pipe_num - ) - - mock_connector.assert_called_once_with( - mock_wf, mock_cfg, mock_strat_pool, pipe_num, None + wf, CFG, strat_pool, pipe_num, opt ) - # Assert workflow returned unchanged - assert result_wf == mock_wf - - # Assert output key was renamed correctly - assert "space-T1w_desc-brain_mask" in result_outputs - assert "space-T1w_desc-tight_brain_mask" not in result_outputs - assert result_outputs["space-T1w_desc-brain_mask"] == "brain_mask_data" - assert result_outputs["other_output"] == "other_data" + # The output key should always be present + assert any( + k.startswith("space-T1w_desc-brain_mask") for k in result_outputs + ), "Expected brain_mask key in outputs." + # Should not have loose/tight keys + assert not any( + "loose_brain_mask" in k for k in result_outputs + ), "Loose brain mask key should not be present." + assert not any( + "tight_brain_mask" in k for k in result_outputs + ), "Tight brain mask key should not be present." + # Should return the workflow unchanged + assert result_wf == wf From 33756d23521f2a195d468310d81e4627fdf460ad Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 13 Aug 2025 15:30:09 -0400 Subject: [PATCH 464/507] :loud_sound: Improve dropped variant logging --- CPAC/anat_preproc/anat_preproc.py | 10 ++++++---- CPAC/anat_preproc/tests/test_anat_preproc.py | 2 +- CPAC/pipeline/engine.py | 11 ++++++++++- CPAC/registration/registration.py | 2 +- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 482b11ac8e..d66d5ad786 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -695,13 +695,15 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(anat_skullstrip, "out_file", anat_brain_mask, "in_file_a") + outputs = {} + if strat_pool.check_rpool("desc-preproc_T1w"): outputs = {"space-T1w_desc-brain_mask": (anat_brain_mask, "out_file")} elif strat_pool.check_rpool("desc-preproc_T2w"): outputs = {"space-T2w_desc-brain_mask": (anat_brain_mask, "out_file")} - return (wf, outputs) + return wf, outputs def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): @@ -1392,9 +1394,8 @@ def mask_T2(wf_name="mask_T2"): ) def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): if opt not in anatomical_init.option_val: - raise ValueError( - f"\n[!] Error: Invalid option for deoblique: {opt}. \nExpected one of {anatomical_init.option_val}" - ) + msg = f"\n[!] Error: Invalid option for deoblique: {opt}. 
\nExpected one of {anatomical_init.option_val}" + raise ValueError(msg) if opt == "warp": anat_deoblique = pe.Node( @@ -2004,6 +2005,7 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): functions = { "FreeSurfer-ABCD": freesurfer_abcd_brain_connector, "FreeSurfer-Brainmask": freesurfer_brain_connector, + "FreeSurfer-BET-Loose": freesurfer_fsl_brain_connector, "FreeSurfer-BET-Tight": freesurfer_fsl_brain_connector, } if opt in ["FreeSurfer-BET-Loose", "FreeSurfer-BET-Tight"]: diff --git a/CPAC/anat_preproc/tests/test_anat_preproc.py b/CPAC/anat_preproc/tests/test_anat_preproc.py index 41128ea6cd..7a65dd8a3f 100755 --- a/CPAC/anat_preproc/tests/test_anat_preproc.py +++ b/CPAC/anat_preproc/tests/test_anat_preproc.py @@ -289,7 +289,7 @@ def test_anat_brain(self): @pytest.mark.parametrize("opt", ["FreeSurfer-BET-Loose", "FreeSurfer-BET-Tight"]) @pytest.mark.parametrize("t1w", ["desc-restore_T1w", "desc-preproc_T1w"]) -def test_brain_mask_freesurfer_fsl_real(opt: str, t1w: str): +def test_brain_mask_freesurfer_fsl(opt: str, t1w: str): """Test that brain_mask_freesurfer_fsl correctly generates output key using real code.""" # Create minimal mocks for required workflow/config/strat_pool, but do not patch freesurfer_fsl_brain_connector diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 4b47cf36b6..565665b3c3 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -43,6 +43,7 @@ from CPAC.pipeline.nodeblock import NodeBlockFunction from CPAC.pipeline.utils import ( CrossedVariantsError, + find_variants, MOVEMENT_FILTER_KEYS, name_fork, short_circuit_crossed_variants, @@ -860,7 +861,10 @@ def get_strats(self, resources, debug: bool | str = False): except CrossedVariantsError: if debug: verbose_logger = getLogger("CPAC.engine") - verbose_logger.debug("Dropped crossed variants strat: %s", pipe_idx) + verbose_logger.debug( + "Dropped crossed variants strat: %s", + find_variants(strat_pool, resources), + ) continue if debug: verbose_logger = getLogger("CPAC.engine") @@ -1634,6 +1638,11 @@ def connect_block(self, wf, cfg, rpool): opts.append(option_val) else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! 
opts = [None] + if debug: + verbose_logger = getLogger("CPAC.engine") + verbose_logger.debug( + f"[connect_block] opts resolved for {name}: {opts}" + ) all_opts += opts sidecar_additions = { diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index d001b00550..41b42aae6e 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3311,7 +3311,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ), ( "desc-preproc_T1w", - ["desc-restore-brain_T1w", "desc-preproc_T1w"], + "desc-restore-brain_T1w", "desc-preproc_T2w", "desc-preproc_T2w", "T2w", From 323f031153ff105c3665dd50a7954fc6f378c4d2 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 18 Aug 2025 16:50:09 -0400 Subject: [PATCH 465/507] adding match_fov node before apply_warp --- CPAC/registration/registration.py | 37 ++++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index d001b00550..efb8645dfc 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2906,6 +2906,8 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], "space-T1w_desc-brain_mask", "T1w-template", + "T1w-brain-template", + "T1w-brain-template-mask", "from-T1w_to-template_mode-image_xfm", "from-template_to-T1w_mode-image_xfm", "space-template_desc-brain_T1w", @@ -3063,6 +3065,11 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_inv_xfms_to_list, "out", merge_inv_xfms, "in_files") + # Match FOVs using flirt -in infile -ref MNI152_T1_1mm_resample.nii.gz -out my_T1w_resampled.nii.gz -applyxfm -usesqform + match_fovs_T1w = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_{pipe_num}") + match_fovs_T1w.inputs.apply_xfm = True + match_fovs_T1w.inputs.uses_qform = True + # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} fsl_apply_warp_t1_to_template = pe.Node( interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_to_template_{pipe_num}" @@ -3070,16 +3077,22 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None fsl_apply_warp_t1_to_template.inputs.relwarp = True fsl_apply_warp_t1_to_template.inputs.interp = "spline" - node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) - wf.connect(node, out, fsl_apply_warp_t1_to_template, "in_file") - node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, match_fovs_T1w, "reference") wf.connect(node, out, fsl_apply_warp_t1_to_template, "ref_file") + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, match_fovs_T1w, "in_file") + wf.connect(match_fovs_T1w, "out_file", fsl_apply_warp_t1_to_template, "in_file") + wf.connect( merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) + match_fovs_T1w_brain = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}") + match_fovs_T1w_brain.inputs.apply_xfm = True + match_fovs_T1w_brain.inputs.uses_qform = True + # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} fsl_apply_warp_t1_brain_to_template = pe.Node( interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_brain_to_template_{pipe_num}" @@ -3088,16 +3101,22 @@ def 
overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None fsl_apply_warp_t1_brain_to_template.inputs.interp = "nn" # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality - node, out = strat_pool.get_data("desc-preproc_T1w") - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") + node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, match_fovs_T1w_brain, "in_file") + wf.connect(match_fovs_T1w_brain, "out_file", fsl_apply_warp_t1_brain_to_template, "in_file") - node, out = strat_pool.get_data("T1w-template") + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, match_fovs_T1w_brain, "reference") wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") wf.connect( merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" ) + match_fovs_T1w_brain_mask = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}") + match_fovs_T1w_brain_mask.inputs.apply_xfm = True + match_fovs_T1w_brain_mask.inputs.uses_qform = True + fsl_apply_warp_t1_brain_mask_to_template = pe.Node( interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_brain_mask_to_template_{pipe_num}", @@ -3106,9 +3125,11 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = "nn" node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") + wf.connect(node, out, match_fovs_T1w_brain_mask, "in_file") + wf.connect(match_fovs_T1w_brain_mask, "out_file", fsl_apply_warp_t1_brain_mask_to_template, "in_file") - node, out = strat_pool.get_data("T1w-template") + node, out = strat_pool.get_data("T1w-brain-template-mask") + wf.connect(node, out, match_fovs_T1w_brain_mask, "reference") wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") wf.connect( From 279cbbc6a6e0937a128c525c2dafe34548ce7d3a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 20:55:30 +0000 Subject: [PATCH 466/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/registration/registration.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index efb8645dfc..a2cb7e3272 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3066,7 +3066,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_inv_xfms_to_list, "out", merge_inv_xfms, "in_files") # Match FOVs using flirt -in infile -ref MNI152_T1_1mm_resample.nii.gz -out my_T1w_resampled.nii.gz -applyxfm -usesqform - match_fovs_T1w = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_{pipe_num}") + match_fovs_T1w = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_{pipe_num}" + ) match_fovs_T1w.inputs.apply_xfm = True match_fovs_T1w.inputs.uses_qform = True @@ -3089,7 +3091,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - match_fovs_T1w_brain = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}") + match_fovs_T1w_brain = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}" + ) 
match_fovs_T1w_brain.inputs.apply_xfm = True match_fovs_T1w_brain.inputs.uses_qform = True @@ -3103,7 +3107,12 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) wf.connect(node, out, match_fovs_T1w_brain, "in_file") - wf.connect(match_fovs_T1w_brain, "out_file", fsl_apply_warp_t1_brain_to_template, "in_file") + wf.connect( + match_fovs_T1w_brain, + "out_file", + fsl_apply_warp_t1_brain_to_template, + "in_file", + ) node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, match_fovs_T1w_brain, "reference") @@ -3113,7 +3122,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" ) - match_fovs_T1w_brain_mask = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}") + match_fovs_T1w_brain_mask = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}" + ) match_fovs_T1w_brain_mask.inputs.apply_xfm = True match_fovs_T1w_brain_mask.inputs.uses_qform = True @@ -3126,7 +3137,12 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data("space-T1w_desc-brain_mask") wf.connect(node, out, match_fovs_T1w_brain_mask, "in_file") - wf.connect(match_fovs_T1w_brain_mask, "out_file", fsl_apply_warp_t1_brain_mask_to_template, "in_file") + wf.connect( + match_fovs_T1w_brain_mask, + "out_file", + fsl_apply_warp_t1_brain_mask_to_template, + "in_file", + ) node, out = strat_pool.get_data("T1w-brain-template-mask") wf.connect(node, out, match_fovs_T1w_brain_mask, "reference") From 23bc03b9454296538b55e54ea5cc4bae71150da5 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 22 Aug 2025 16:07:36 -0400 Subject: [PATCH 467/507] changing ref from brain to whole head --- CPAC/func_preproc/func_preproc.py | 37 ++++++-------- CPAC/pipeline/cpac_pipeline.py | 16 +++--- CPAC/registration/registration.py | 82 +++++++------------------------ 3 files changed, 44 insertions(+), 91 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index d098b6186b..c787b06f3c 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1559,12 +1559,12 @@ def anat_brain_to_bold_res(wf_name, cfg, pipe_num): inputNode = pe.Node( util.IdentityInterface( - fields=["T1w-template-funcreg", "space-template_desc-preproc_T1w"] + fields=["T1w-template-funcreg", "space-template_desc-head_T1w"] ), name="inputspec", ) outputNode = pe.Node( - util.IdentityInterface(fields=["space-template_res-bold_desc-brain_T1w"]), + util.IdentityInterface(fields=["space-template_res-bold_desc-head_T1w"]), name="outputspec", ) @@ -1579,7 +1579,7 @@ def anat_brain_to_bold_res(wf_name, cfg, pipe_num): ]["registration"]["FSL-FNIRT"]["identity_matrix"] wf.connect( - inputNode, "space-template_desc-preproc_T1w", anat_brain_to_func_res, "in_file" + inputNode, "space-template_desc-head_T1w", anat_brain_to_func_res, "in_file" ) wf.connect(inputNode, "T1w-template-funcreg", anat_brain_to_func_res, "ref_file") @@ -1587,7 +1587,7 @@ def anat_brain_to_bold_res(wf_name, cfg, pipe_num): anat_brain_to_func_res, "out_file", outputNode, - "space-template_res-bold_desc-brain_T1w", + "space-template_res-bold_desc-head_T1w", ) return wf @@ -1598,7 +1598,7 @@ def 
anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") inputNode = pe.Node( util.IdentityInterface( - fields=["space-template_desc-brain_mask", "space-template_desc-preproc_T1w"] + fields=["space-template_desc-brain_mask", "space-template_desc-head_T1w"] ), name="inputspec", ) @@ -1625,7 +1625,7 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): ) wf.connect( inputNode, - "space-template_desc-preproc_T1w", + "space-template_desc-head_T1w", anat_brain_mask_to_func_res, "ref_file", ) @@ -1649,11 +1649,11 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): option_val="Anatomical_Resampled", inputs=[ "T1w-template-funcreg", - "space-template_desc-preproc_T1w", + "space-template_desc-head_T1w", "space-template_desc-brain_mask", ], outputs=[ - "space-template_res-bold_desc-brain_T1w", + "space-template_res-bold_desc-head_T1w", "space-template_desc-bold_mask", ], ) @@ -1666,9 +1666,9 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): wf_name="anat_brain_to_bold_res", cfg=cfg, pipe_num=pipe_num ) - node, out = strat_pool.get_data("space-template_desc-preproc_T1w") + node, out = strat_pool.get_data("space-template_desc-head_T1w") wf.connect( - node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" + node, out, anat_brain_to_func_res, "inputspec.space-template_desc-head_T1w" ) node, out = strat_pool.get_data("T1w-template-funcreg") @@ -1690,15 +1690,15 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect( anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", + "outputspec.space-template_res-bold_desc-head_T1w", anat_brain_mask_to_func_res, - "inputspec.space-template_desc-preproc_T1w", + "inputspec.space-template_desc-head_T1w", ) outputs = { - "space-template_res-bold_desc-brain_T1w": ( + "space-template_res-bold_desc-head_T1w": ( anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", + "outputspec.space-template_res-bold_desc-head_T1w", ), "space-template_desc-bold_mask": ( anat_brain_mask_to_func_res, @@ -1897,7 +1897,7 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): ["functional_preproc", "template_space_func_masking", "run"], ], inputs=[ - ("space-template_desc-preproc_bold", "space-template_desc-bold_mask"), + ("space-template_desc-head_bold", "space-template_desc-bold_mask"), ], outputs={ "space-template_desc-preproc_bold": { @@ -1908,10 +1908,6 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): "Description": "The skull-stripped BOLD time-series.", "SkullStripped": True, }, - "space-template_desc-head_bold": { - "Description": "The non skull-stripped BOLD time-series.", - "SkullStripped": False, - }, }, ) def template_space_bold_masking( @@ -1931,7 +1927,7 @@ def template_space_bold_masking( func_apply_mask.inputs.outputtype = "NIFTI_GZ" node_head_bold, out_head_bold = strat_pool.get_data( - "space-template_desc-preproc_bold" + "space-template_desc-head_bold" ) wf.connect(node_head_bold, out_head_bold, func_apply_mask, "in_file_a") @@ -1941,7 +1937,6 @@ def template_space_bold_masking( outputs: POOL_RESOURCE_DICT = { "space-template_desc-preproc_bold": (func_apply_mask, "out_file"), "space-template_desc-brain_bold": (func_apply_mask, "out_file"), - "space-template_desc-head_bold": (node_head_bold, out_head_bold), } return wf, outputs diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index c2ffa96404..6546a1324f 100644 --- 
a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1291,6 +1291,13 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): mask_sbref, ] + # Template space functional mask + if cfg.functional_preproc["template_space_func_masking"]["run"]: + if not rpool.check_rpool("space-template_desc-bold_mask"): + pipeline_blocks += [ + bold_mask_anatomical_resampled, + ] + # Distortion/Susceptibility Correction distcor_blocks = [] if "fmap" in sub_dict: @@ -1472,17 +1479,12 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): warp_deriv_mask_to_EPItemplate, ] - # Template space functional masking + # Apply mask in template space if cfg.functional_preproc["template_space_func_masking"]["run"]: - if not rpool.check_rpool("space-template_desc-bold_mask"): - pipeline_blocks += [ - bold_mask_anatomical_resampled, - ] - pipeline_blocks += [ template_space_bold_masking, ] - + # Template-space nuisance regression nuisance_template = ( cfg["nuisance_corrections", "2-nuisance_regression", "space"] == "template" diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index efb8645dfc..9bdda851b4 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2901,6 +2901,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[ ( "desc-restore-brain_T1w", + "desc-head_T1w", ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], @@ -3081,7 +3082,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(node, out, match_fovs_T1w, "reference") wf.connect(node, out, fsl_apply_warp_t1_to_template, "ref_file") - node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w"]) wf.connect(node, out, match_fovs_T1w, "in_file") wf.connect(match_fovs_T1w, "out_file", fsl_apply_warp_t1_to_template, "in_file") @@ -4169,7 +4170,8 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None) "from-bold_to-template_mode-image_xfm", "fsl-blip-warp", "desc-preproc_T1w", - "space-template_res-bold_desc-brain_T1w", + "desc-head_T1w", + "space-template_res-bold_desc-head_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", "T1w-template-funcreg", @@ -4199,7 +4201,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_func_to_anat_linear_warp.inputs.out_relwarp = True convert_func_to_anat_linear_warp.inputs.relwarp = True - node, out = strat_pool.get_data("desc-preproc_T1w") + node, out = strat_pool.get_data("desc-head_T1w") wf.connect(node, out, convert_func_to_anat_linear_warp, "reference") if strat_pool.check_rpool("fsl-blip-warp"): @@ -4231,24 +4233,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") wf.connect(node, out, convert_func_to_standard_warp, "warp2") - from CPAC.func_preproc.func_preproc import ( - anat_brain_mask_to_bold_res, - anat_brain_to_bold_res, - ) - - anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) - - node, out = strat_pool.get_data("space-template_desc-preproc_T1w") - wf.connect( - node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" - ) - - node, out = strat_pool.get_data("T1w-template-funcreg") - 
wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg") - + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", + node, out, convert_func_to_standard_warp, "reference", ) @@ -4314,11 +4301,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - convert_registration_warp, - "reference", + node, out, convert_registration_warp, "reference", ) wf.connect( @@ -4356,11 +4341,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" ) + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - applywarp_func_to_standard, - "ref_file", + node, out, applywarp_func_to_standard, "ref_file", ) # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz @@ -4384,11 +4367,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "field_file", ) + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - applywarp_func_mask_to_standard, - "ref_file", + node, out, applywarp_func_mask_to_standard, "ref_file", ) ### Loop ends! 
### @@ -4436,11 +4417,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - convert_dc_warp, - "reference", + node, out, convert_dc_warp, "reference", ) wf.connect(multiply_func_roi_by_zero, "out_file", convert_dc_warp, "warp1") @@ -4458,11 +4437,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("motion-basefile") wf.connect(node, out, applywarp_scout, "in_file") + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - applywarp_scout, - "ref_file", + node, out, applywarp_scout, "ref_file", ) wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") @@ -4471,30 +4448,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") - anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res( - wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num - ) - - node, out = strat_pool.get_data("space-template_desc-brain_mask") + node, out = strat_pool.get_data("space-template_desc-bold_mask") wf.connect( - node, - out, - anat_brain_mask_to_func_res, - "inputspec.space-template_desc-brain_mask", - ) - - wf.connect( - anat_brain_to_func_res, - "outputspec.space-template_res-bold_desc-brain_T1w", - anat_brain_mask_to_func_res, - "inputspec.space-template_desc-preproc_T1w", - ) - - wf.connect( - anat_brain_mask_to_func_res, - "outputspec.space-template_desc-bold_mask", - merge_func_mask, - "in1", + node, out, merge_func_mask, "in1", ) wf.connect(find_min_mask, "out_file", merge_func_mask, "in2") From 5376cf32e40aa9aca746d15be56ac05f6f5c1c89 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 26 Aug 2025 11:23:43 -0400 Subject: [PATCH 468/507] :recycle: Reduce initial number of connections; fix short-circuit --- CPAC/pipeline/engine.py | 144 ++++++++++++++++++++++++++++++++++++++-- CPAC/pipeline/utils.py | 26 +++++--- 2 files changed, 154 insertions(+), 16 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 565665b3c3..0f49941eca 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -24,7 +24,7 @@ import json import os import re -from typing import Literal, Optional +from typing import Callable, Generator, Literal, Optional import warnings import pandas as pd @@ -604,8 +604,6 @@ def get_strats(self, resources, debug: bool | str = False): # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS # TODO: (and it doesn't have to be) - import itertools - linked_resources = [] resource_list = [] if debug: @@ -686,7 +684,7 @@ def get_strats(self, resources, debug: bool | str = False): # TODO: and the actual resource is encoded in the tag: of the last item, every time! 
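         # e.g. (illustrative resource names): if "desc-preproc_T1w" and
         # "space-T1w_desc-brain_mask" are linked, the linked product below
         # will not pair a T1w from one skull-stripping variant with a brain
         # mask produced by a different one.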
         # keying the strategies to the resources, inverting it
         if len_inputs > 1:
-            strats = itertools.product(*total_pool)
+            strats = self.linked_product(total_pool, linked_resources, self.get_json)

             # we now currently have "strats", the combined permutations of all the strategies, as a list of tuples, each tuple combining one version of input each, being one of the permutations.
             # OF ALL THE DIFFERENT INPUTS. and they are tagged by their fetched inputs with {name}:{strat}.
@@ -1482,6 +1480,128 @@ def node_data(self, resource, **kwargs):
         """
         return NodeData(self, resource, **kwargs)

+    @staticmethod
+    def _normalize_variant_dict(json_obj: dict) -> dict[str, Optional[str]]:
+        """
+        Return {variant_key: primary_value or None}.
+
+        - list items are concatenated
+        - "NO-..." entries normalize to None
+        """
+        out = {}
+        for k, v in json_obj.get("CpacVariant", {}).items():
+            assert isinstance(v, (list, str))
+            primary = "-".join(v) if isinstance(v, list) else v
+            out[k] = (
+                None
+                if (isinstance(primary, str) and primary.startswith("NO-"))
+                else primary
+            )
+        return out
+
+    def _is_consistent(
+        self,
+        strat_list: list,
+        linked_resources: list | tuple,
+        json_lookup: Callable[[str, str | list[str]], dict],
+        debug: bool = False,
+    ) -> bool:
+        """
+        Ensure consistency for linked_resources in strat_list.
+
+        Rules:
+        - Sub-keys are only compared if they exist in multiple resources in the same linked group.
+        - Missing sub-keys or NO-... values are compatible with anything.
+        - Lists are compared ignoring order.
+        - Debug prints a summary table per linked group.
+        """
+        if not linked_resources:
+            return True
+
+        # Build JSON for each prov
+        prov_json = {}
+        for prov in strat_list:
+            resource, strat_idx = self.generate_prov_string(prov)
+            prov_json[resource] = self._normalize_variant_dict(
+                json_lookup(resource, strat_idx)
+            )
+
+        for linked_group in linked_resources:
+            # Keep only resources present in strat_list
+            variants_map = {r: prov_json[r] for r in linked_group if r in prov_json}
+            if len(variants_map) < 2:
+                continue  # nothing to compare yet
+
+            # Determine which sub-keys are shared across multiple resources
+            subkey_counts = {}
+            for subdict in variants_map.values():
+                for k in subdict.keys():
+                    subkey_counts[k] = subkey_counts.get(k, 0) + 1
+            shared_subkeys = {k for k, count in subkey_counts.items() if count > 1}
+
+            # Pairwise comparison only for shared sub-keys
+            resources = list(variants_map.keys())
+            for i in range(len(resources)):
+                res_a = resources[i]
+                subdict_a = variants_map[res_a]
+                for j in range(i + 1, len(resources)):
+                    res_b = resources[j]
+                    subdict_b = variants_map[res_b]
+
+                    for subkey in shared_subkeys:
+                        val_a = subdict_a.get(subkey)
+                        val_b = subdict_b.get(subkey)
+
+                        # Skip if missing or NO-...
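+                        # e.g. (illustrative values): a val_a of "NO-bbr" or a
+                        # missing sub-key acts as a wildcard and rules nothing
+                        # out; only two concrete, differing values (say "bbr"
+                        # vs. "flirt") make the combination inconsistent.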
+ skip_a = ( + val_a is None + or ( + isinstance(val_a, list) + and all(str(v).startswith("NO-") for v in val_a) + ) + or (isinstance(val_a, str) and val_a.startswith("NO-")) + ) + skip_b = ( + val_b is None + or ( + isinstance(val_b, list) + and all(str(v).startswith("NO-") for v in val_b) + ) + or (isinstance(val_b, str) and val_b.startswith("NO-")) + ) + if skip_a or skip_b: + continue + + # Normalize lists + val_a_norm = sorted(val_a) if isinstance(val_a, list) else val_a + val_b_norm = sorted(val_b) if isinstance(val_b, list) else val_b + + if val_a_norm != val_b_norm: + return False + return True + + def linked_product( + self, + resource_pools: "list[ResourcePool]", + linked_resources: list[str], + json_lookup: Callable[[str, str | list[str]], dict], + ) -> Generator: + """ + Generate only consistent combinations of cpac_prov values across pools. + """ + + def backtrack(idx, current): + if idx == len(resource_pools): + yield list(current) + return + for prov in resource_pools[idx]: + current.append(prov) + if self._is_consistent(current, linked_resources, json_lookup): + yield from backtrack(idx + 1, current) + current.pop() + + yield from backtrack(0, []) + class NodeBlock: def __init__(self, node_block_functions, debug=False): @@ -1582,7 +1702,10 @@ def grab_tiered_dct(self, cfg, key_list): raise KeyError(msg) from ke return cfg_dct - def connect_block(self, wf, cfg, rpool): + def connect_block( + self, wf: pe.Workflow, cfg: Configuration, rpool: ResourcePool + ) -> pe.Workflow: + """Connect NodeBlock to a Workflow given a Configuration and ResourcePool.""" debug = cfg.pipeline_setup["Debugging"]["verbose"] all_opts = [] for name, block_dct in self.node_blocks.items(): @@ -1641,7 +1764,7 @@ def connect_block(self, wf, cfg, rpool): if debug: verbose_logger = getLogger("CPAC.engine") verbose_logger.debug( - f"[connect_block] opts resolved for {name}: {opts}" + "[connect_block] opts resolved for %s: %s", name, opts ) all_opts += opts @@ -1759,6 +1882,13 @@ def connect_block(self, wf, cfg, rpool): strat_pool.copy_resource(input_name, interface[0]) replaced_inputs.append(interface[0]) try: + if debug: + verbose_logger = getLogger("CPAC.engine") + verbose_logger.debug( + "Before block '%s', strat_pool contains: %s", + block_function.__name__, + list(strat_pool.rpool.keys()), + ) wf, outs = block_function(wf, cfg, strat_pool, pipe_x, opt) except IOError as e: # duplicate node WFLOGGER.warning(e) @@ -2472,7 +2602,7 @@ def func_outdir_ingress( def set_iterables(scan, mask_paths=None, ts_paths=None): - # match scan with filepath to get filepath + """Match scan with filepath to get filepath.""" mask_path = [path for path in mask_paths if scan in path] ts_path = [path for path in ts_paths if scan in path] diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 9a121c5554..97852d2a8e 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -255,7 +255,7 @@ def find_variants( if isinstance(keys, str): try: return {keys: pool.get_json(keys)["CpacVariant"]} - except: + except LookupError: return {} for key in keys: outputs = {**outputs, **find_variants(pool, key)} @@ -272,14 +272,22 @@ def short_circuit_crossed_variants( :alt: Don't cross the streams """ _variants = find_variants(pool, inputs) - variants = {} - for variant in _variants.values(): - for k, v in variant.items(): - if k not in variants: - variants[k] = set(v) - else: - variants[k] = {*variants[k], *v} - crossed_variants = {k: v for k, v in variants.items() if len(v) > 1} + # collect all variant dicts + 
variant_dicts = list(_variants.values()) + if not variant_dicts: + return + + # only keep keys that exist in all variant dicts + common_keys = set.intersection(*(set(v.keys()) for v in variant_dicts)) + + crossed_variants = {} + for key in common_keys: + values = set() + for variant in variant_dicts: + values.update(variant.get(key, [])) + if len(values) > 1: + crossed_variants[key] = values + if crossed_variants: msg = f"Crossed variants found: {crossed_variants}" raise CrossedVariantsError(msg) From 9d89c6f93b202574781419a91449d94b71fe8477 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 26 Aug 2025 12:13:52 -0400 Subject: [PATCH 469/507] removing unnecessary nodes --- CPAC/registration/registration.py | 132 ++++++++++++++++++------------ 1 file changed, 81 insertions(+), 51 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9bdda851b4..c2bdb6d606 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -22,6 +22,7 @@ from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils +from pathlib import Path from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow @@ -4194,6 +4195,21 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz """ + # Identity matrix node needed for matching FOV + identity_node = pe.Node( + Function( + imports=["from pathlib import Path", "import numpy as np"], + input_names=["workdir"], + output_names=["identity_file"], + function=lambda workdir: ( + np.savetxt(Path(workdir) / "identity.mat", np.eye(4), fmt="%.6f") or + str(Path(workdir) / "identity.mat") + ) + ), + name=f"create_identity_matrix_{pipe_num}" + ) + identity_node.inputs.workdir = str(Path.cwd()) + convert_func_to_anat_linear_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" ) @@ -4204,15 +4220,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("desc-head_T1w") wf.connect(node, out, convert_func_to_anat_linear_warp, "reference") - if strat_pool.check_rpool("fsl-blip-warp"): - node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") - wf.connect(node, out, convert_func_to_anat_linear_warp, "postmat") + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") + if strat_pool.check_rpool("fsl-blip-warp"): node, out = strat_pool.get_data("fsl-blip-warp") wf.connect(node, out, convert_func_to_anat_linear_warp, "warp1") - else: - node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") - wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L140 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} @@ -4253,15 +4266,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, 
opt=None): node, out = strat_pool.get_data("desc-reorient_bold") wf.connect(node, out, extract_func_roi, "in_file") - # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp - multiply_func_roi_by_zero = pe.Node( - interface=fsl.maths.MathsCommand(), name=f"multiply_func_roi_by_zero_{pipe_num}" - ) - - multiply_func_roi_by_zero.inputs.args = "-mul 0" - - wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") @@ -4272,6 +4276,16 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, split_func, "in_file") ### Loop starts! ### + # Match FOV func_to_standard + match_fov_func = pe.MapNode( + interface=fsl.FLIRT(apply_xfm=True, interp="spline"), name=f"match_fov_func_{pipe_num}", iterfield=["in_file"] + ) + wf.connect(identity_node, "identity_file", match_fov_func, "in_matrix_file") + wf.connect(split_func, "out_files", match_fov_func, "in_file") + + node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") + wf.connect(node, out, match_fov_func, "reference") + # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz convert_motion_distortion_warp = pe.MapNode( interface=fsl.ConvertWarp(), @@ -4282,15 +4296,15 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect( - multiply_func_roi_by_zero, "out_file", convert_motion_distortion_warp, "warp1" - ) - - wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference") + wf.connect(match_fov_func, "out_file", convert_motion_distortion_warp, "reference") node, out = strat_pool.get_data("coordinate-transformation") wf.connect(node, out, convert_motion_distortion_warp, "postmat") + if strat_pool.check_rpool("gradient-distortion-field"): + node, out = strat_pool.get_data("gradient-distortion-field") + wf.connect(node, out, convert_motion_distortion_warp, "warp1") + # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz convert_registration_warp = pe.MapNode( interface=fsl.ConvertWarp(), @@ -4314,17 +4328,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_func_to_standard_warp, "out_file", convert_registration_warp, "warp2" ) - # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz - generate_vol_mask = pe.MapNode( - interface=fsl.maths.MathsCommand(), - name=f"generate_mask_{pipe_num}", - iterfield=["in_file"], - ) - - generate_vol_mask.inputs.args = "-mul 0 -add 1" - - wf.connect(split_func, "out_files", generate_vol_mask, "in_file") - # applywarp --rel --interp=spline --in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz 
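     # NOTE: one combined warp is applied per split volume; the MapNode below
     # iterates over in_file/field_file pairs, mirroring the per-${vnum} loop
     # in the DCAN OneStepResampling script quoted in the comments above.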
applywarp_func_to_standard = pe.MapNode( interface=fsl.ApplyWarp(), @@ -4332,10 +4335,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): iterfield=["in_file", "field_file"], ) - applywarp_func_to_standard.inputs.relwarp = True applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") + wf.connect(match_fov_func, "out_file", applywarp_func_to_standard, "in_file") wf.connect( convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" @@ -4353,11 +4355,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): iterfield=["in_file", "field_file"], ) - applywarp_func_mask_to_standard.inputs.relwarp = True applywarp_func_mask_to_standard.inputs.interp = "nn" + node, out = strat_pool.get_data("space-template_desc-bold_mask") wf.connect( - generate_vol_mask, "out_file", applywarp_func_mask_to_standard, "in_file" + node, out, applywarp_func_mask_to_standard, "in_file" ) wf.connect( @@ -4408,41 +4410,35 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") - # Combine transformations: gradient non-linearity distortion + fMRI_dc to standard - # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${GradientDistortionField} --warp2=${OutputTransform} --out=${WD}/Scout_gdc_MNI_warp.nii.gz - convert_dc_warp = pe.Node( - interface=fsl.ConvertWarp(), name=f"convert_dc_warp_{pipe_num}" + # Match FOV scout_to_standard + match_fov_scout_to_standard = pe.MapNode( + interface=fsl.FLIRT(apply_xfm=True, interp="spline"), name=f"match_fov_scout_to_standard_{pipe_num}", iterfield=["in_file"] ) - - convert_dc_warp.inputs.out_relwarp = True - convert_dc_warp.inputs.relwarp = True + wf.connect(identity_node, "identity_file", match_fov_scout_to_standard, "in_matrix_file") + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, match_fov_scout_to_standard, "in_file") node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, convert_dc_warp, "reference", + node, out, match_fov_scout_to_standard, "reference" ) - wf.connect(multiply_func_roi_by_zero, "out_file", convert_dc_warp, "warp1") - - wf.connect(convert_func_to_standard_warp, "out_file", convert_dc_warp, "warp2") - # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} applywarp_scout = pe.Node( interface=fsl.ApplyWarp(), name=f"applywarp_scout_input_{pipe_num}" ) - applywarp_scout.inputs.relwarp = True applywarp_scout.inputs.interp = "spline" - node, out = strat_pool.get_data("motion-basefile") - wf.connect(node, out, applywarp_scout, "in_file") + wf.connect(match_fov_scout_to_standard, "out_file", applywarp_scout, "in_file") node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( node, out, applywarp_scout, "ref_file", ) - wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") + # warp field is just fMRI->standard (skip GDC) + wf.connect(convert_func_to_standard_warp, "out_file", applywarp_scout, "field_file") # https://github.com/DCAN-Labs/DCAN-HCP/blob/1214767/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float @@ -5597,3 +5593,37 @@ def _warp_return( if 
apply_xfm is None: return wf, {} return wf, outputs + + +def create_identity_matrix(workdir): + """Create an identity matrix for FLIRT.""" + identity_file = Path(workdir) / "identity.mat" + np.savetxt(identity_file, np.eye(4), fmt="%.6f") + return str(identity_file) + +def build_match_fov_wf(name, workdir, pipe_num): + """ + Workflow to match Field-of-View for T1w or functional images + using an identity matrix. + """ + wf = Workflow(name=name) + + identity_node = Node( + Function( + input_names=["workdir"], + output_names=["identity_file"], + function=create_identity_matrix + ), + name=f"create_identity_matrix_{pipe_num}" + ) + identity_node.inputs.workdir = workdir + + match_fov_node = MapNode( + interface=fsl.FLIRT(apply_xfm=True, interp="spline"), + name=f"match_fov_{pipe_num}", + iterfield=["in_file"] + ) + + wf.connect(identity_node, "identity_file", match_fov_node, "in_matrix_file") + + return wf, match_fov_node \ No newline at end of file From 262c2fc528b07cb8e70426de8fee0790cc80b150 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Aug 2025 16:14:21 +0000 Subject: [PATCH 470/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/func_preproc/func_preproc.py | 4 +- CPAC/pipeline/cpac_pipeline.py | 2 +- CPAC/registration/registration.py | 73 +++++++++++++++++++------------ 3 files changed, 48 insertions(+), 31 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index c787b06f3c..43658bbec0 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1926,9 +1926,7 @@ def template_space_bold_masking( func_apply_mask.inputs.expr = "a*b" func_apply_mask.inputs.outputtype = "NIFTI_GZ" - node_head_bold, out_head_bold = strat_pool.get_data( - "space-template_desc-head_bold" - ) + node_head_bold, out_head_bold = strat_pool.get_data("space-template_desc-head_bold") wf.connect(node_head_bold, out_head_bold, func_apply_mask, "in_file_a") node, out = strat_pool.get_data("space-template_desc-bold_mask") diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 6546a1324f..b9e6865939 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1484,7 +1484,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): pipeline_blocks += [ template_space_bold_masking, ] - + # Template-space nuisance regression nuisance_template = ( cfg["nuisance_corrections", "2-nuisance_regression", "space"] == "template" diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index c2bdb6d606..d42f66095d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -22,7 +22,7 @@ from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils -from pathlib import Path +from pathlib import Path from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow @@ -4202,14 +4202,14 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): input_names=["workdir"], output_names=["identity_file"], function=lambda workdir: ( - np.savetxt(Path(workdir) / "identity.mat", np.eye(4), fmt="%.6f") or - str(Path(workdir) / "identity.mat") - ) + np.savetxt(Path(workdir) / "identity.mat", np.eye(4), fmt="%.6f") + or 
str(Path(workdir) / "identity.mat") + ), ), - name=f"create_identity_matrix_{pipe_num}" + name=f"create_identity_matrix_{pipe_num}", ) identity_node.inputs.workdir = str(Path.cwd()) - + convert_func_to_anat_linear_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" ) @@ -4248,7 +4248,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, + node, + out, convert_func_to_standard_warp, "reference", ) @@ -4278,7 +4279,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ### Loop starts! ### # Match FOV func_to_standard match_fov_func = pe.MapNode( - interface=fsl.FLIRT(apply_xfm=True, interp="spline"), name=f"match_fov_func_{pipe_num}", iterfield=["in_file"] + interface=fsl.FLIRT(apply_xfm=True, interp="spline"), + name=f"match_fov_func_{pipe_num}", + iterfield=["in_file"], ) wf.connect(identity_node, "identity_file", match_fov_func, "in_matrix_file") wf.connect(split_func, "out_files", match_fov_func, "in_file") @@ -4317,7 +4320,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, convert_registration_warp, "reference", + node, + out, + convert_registration_warp, + "reference", ) wf.connect( @@ -4345,7 +4351,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, applywarp_func_to_standard, "ref_file", + node, + out, + applywarp_func_to_standard, + "ref_file", ) # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz @@ -4358,9 +4367,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): applywarp_func_mask_to_standard.inputs.interp = "nn" node, out = strat_pool.get_data("space-template_desc-bold_mask") - wf.connect( - node, out, applywarp_func_mask_to_standard, "in_file" - ) + wf.connect(node, out, applywarp_func_mask_to_standard, "in_file") wf.connect( convert_registration_warp, @@ -4371,7 +4378,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, applywarp_func_mask_to_standard, "ref_file", + node, + out, + applywarp_func_mask_to_standard, + "ref_file", ) ### Loop ends! 
### @@ -4412,16 +4422,18 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # Match FOV scout_to_standard match_fov_scout_to_standard = pe.MapNode( - interface=fsl.FLIRT(apply_xfm=True, interp="spline"), name=f"match_fov_scout_to_standard_{pipe_num}", iterfield=["in_file"] + interface=fsl.FLIRT(apply_xfm=True, interp="spline"), + name=f"match_fov_scout_to_standard_{pipe_num}", + iterfield=["in_file"], + ) + wf.connect( + identity_node, "identity_file", match_fov_scout_to_standard, "in_matrix_file" ) - wf.connect(identity_node, "identity_file", match_fov_scout_to_standard, "in_matrix_file") node, out = strat_pool.get_data("motion-basefile") wf.connect(node, out, match_fov_scout_to_standard, "in_file") node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") - wf.connect( - node, out, match_fov_scout_to_standard, "reference" - ) + wf.connect(node, out, match_fov_scout_to_standard, "reference") # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} applywarp_scout = pe.Node( @@ -4434,7 +4446,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( - node, out, applywarp_scout, "ref_file", + node, + out, + applywarp_scout, + "ref_file", ) # warp field is just fMRI->standard (skip GDC) @@ -4446,7 +4461,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("space-template_desc-bold_mask") wf.connect( - node, out, merge_func_mask, "in1", + node, + out, + merge_func_mask, + "in1", ) wf.connect(find_min_mask, "out_file", merge_func_mask, "in2") @@ -5601,29 +5619,30 @@ def create_identity_matrix(workdir): np.savetxt(identity_file, np.eye(4), fmt="%.6f") return str(identity_file) + def build_match_fov_wf(name, workdir, pipe_num): """ Workflow to match Field-of-View for T1w or functional images using an identity matrix. 
""" wf = Workflow(name=name) - + identity_node = Node( Function( input_names=["workdir"], output_names=["identity_file"], - function=create_identity_matrix + function=create_identity_matrix, ), - name=f"create_identity_matrix_{pipe_num}" + name=f"create_identity_matrix_{pipe_num}", ) identity_node.inputs.workdir = workdir match_fov_node = MapNode( interface=fsl.FLIRT(apply_xfm=True, interp="spline"), name=f"match_fov_{pipe_num}", - iterfield=["in_file"] + iterfield=["in_file"], ) - + wf.connect(identity_node, "identity_file", match_fov_node, "in_matrix_file") - return wf, match_fov_node \ No newline at end of file + return wf, match_fov_node From 713f451b597daba3e39f4ae290a41811dccf599d Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 27 Aug 2025 15:08:46 -0400 Subject: [PATCH 471/507] moving the reference flag from inside of bbreg to coregistration --- CPAC/pipeline/schema.py | 2 +- CPAC/registration/registration.py | 138 ++++++------------ .../configs/pipeline_config_abcd-options.yml | 5 +- .../configs/pipeline_config_blank.yml | 10 +- .../configs/pipeline_config_default.yml | 10 +- .../pipeline_config_fmriprep-options.yml | 7 +- 6 files changed, 59 insertions(+), 113 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 2785f6d8a6..abfb7a3c4e 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -843,12 +843,12 @@ def sanitize(filename): }, "mask_sbref": bool1_1, }, + "reference": In({"whole-head", "brain"}), "boundary_based_registration": { "run": forkable, "bbr_schedule": str, "bbr_wm_map": In({"probability_map", "partial_volume_map"}), "bbr_wm_mask_args": str, - "reference": In({"whole-head", "brain"}), }, }, "EPI_registration": { diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index d42f66095d..a8da814e97 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -22,7 +22,6 @@ from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils -from pathlib import Path from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow @@ -3068,7 +3067,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_inv_xfms_to_list, "out", merge_inv_xfms, "in_files") # Match FOVs using flirt -in infile -ref MNI152_T1_1mm_resample.nii.gz -out my_T1w_resampled.nii.gz -applyxfm -usesqform - match_fovs_T1w = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_{pipe_num}") + match_fovs_T1w = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_{pipe_num}" + ) match_fovs_T1w.inputs.apply_xfm = True match_fovs_T1w.inputs.uses_qform = True @@ -3091,7 +3092,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - match_fovs_T1w_brain = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}") + match_fovs_T1w_brain = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}" + ) match_fovs_T1w_brain.inputs.apply_xfm = True match_fovs_T1w_brain.inputs.uses_qform = True @@ -3105,7 +3108,12 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) 
wf.connect(node, out, match_fovs_T1w_brain, "in_file") - wf.connect(match_fovs_T1w_brain, "out_file", fsl_apply_warp_t1_brain_to_template, "in_file") + wf.connect( + match_fovs_T1w_brain, + "out_file", + fsl_apply_warp_t1_brain_to_template, + "in_file", + ) node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, match_fovs_T1w_brain, "reference") @@ -3115,7 +3123,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" ) - match_fovs_T1w_brain_mask = pe.Node(interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}") + match_fovs_T1w_brain_mask = pe.Node( + interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}" + ) match_fovs_T1w_brain_mask.inputs.apply_xfm = True match_fovs_T1w_brain_mask.inputs.uses_qform = True @@ -3128,7 +3138,12 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data("space-T1w_desc-brain_mask") wf.connect(node, out, match_fovs_T1w_brain_mask, "in_file") - wf.connect(match_fovs_T1w_brain_mask, "out_file", fsl_apply_warp_t1_brain_mask_to_template, "in_file") + wf.connect( + match_fovs_T1w_brain_mask, + "out_file", + fsl_apply_warp_t1_brain_mask_to_template, + "in_file", + ) node, out = strat_pool.get_data("T1w-brain-template-mask") wf.connect(node, out, match_fovs_T1w_brain_mask, "reference") @@ -3335,6 +3350,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ( "desc-preproc_T1w", ["desc-restore-brain_T1w", "desc-preproc_T1w"], + ["desc-restore_T1w", "desc-head_T1w"], "desc-preproc_T2w", "desc-preproc_T2w", "T2w", @@ -3405,7 +3421,18 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("sbref") wf.connect(node, out, func_to_anat, "inputspec.func") - node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "reference" + ] + == "whole-head" + ): + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w"]) + else: + node, out = strat_pool.get_data( + ["desc-restore-brain_T1w", "desc-preproc_T1w"] + ) + wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: @@ -3466,8 +3493,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): if ( cfg.registration_workflows["functional_registration"]["coregistration"][ - "boundary_based_registration" - ]["reference"] + "reference" + ] == "whole-head" ): node, out = strat_pool.get_data("desc-head_T1w") @@ -3475,8 +3502,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): elif ( cfg.registration_workflows["functional_registration"]["coregistration"][ - "boundary_based_registration" - ]["reference"] + "reference" + ] == "brain" ): node, out = strat_pool.get_data("desc-preproc_T1w") @@ -4195,21 +4222,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz """ - # Identity matrix node needed for matching FOV - identity_node = pe.Node( - Function( - imports=["from pathlib import Path", "import numpy as np"], - input_names=["workdir"], - output_names=["identity_file"], - function=lambda workdir: ( - np.savetxt(Path(workdir) / "identity.mat", 
np.eye(4), fmt="%.6f") - or str(Path(workdir) / "identity.mat") - ), - ), - name=f"create_identity_matrix_{pipe_num}", - ) - identity_node.inputs.workdir = str(Path.cwd()) - convert_func_to_anat_linear_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" ) @@ -4277,18 +4289,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, split_func, "in_file") ### Loop starts! ### - # Match FOV func_to_standard - match_fov_func = pe.MapNode( - interface=fsl.FLIRT(apply_xfm=True, interp="spline"), - name=f"match_fov_func_{pipe_num}", - iterfield=["in_file"], - ) - wf.connect(identity_node, "identity_file", match_fov_func, "in_matrix_file") - wf.connect(split_func, "out_files", match_fov_func, "in_file") - - node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") - wf.connect(node, out, match_fov_func, "reference") - # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz convert_motion_distortion_warp = pe.MapNode( interface=fsl.ConvertWarp(), @@ -4299,7 +4299,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(match_fov_func, "out_file", convert_motion_distortion_warp, "reference") + wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference") node, out = strat_pool.get_data("coordinate-transformation") wf.connect(node, out, convert_motion_distortion_warp, "postmat") @@ -4341,9 +4341,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): iterfield=["in_file", "field_file"], ) + applywarp_func_to_standard.inputs.relwarp = True applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(match_fov_func, "out_file", applywarp_func_to_standard, "in_file") + wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") wf.connect( convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" @@ -4364,9 +4365,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): iterfield=["in_file", "field_file"], ) + applywarp_func_mask_to_standard.inputs.relwarp = True applywarp_func_mask_to_standard.inputs.interp = "nn" - node, out = strat_pool.get_data("space-template_desc-bold_mask") + node, out = strat_pool.get_data("space-template_desc-brain_mask") wf.connect(node, out, applywarp_func_mask_to_standard, "in_file") wf.connect( @@ -4420,29 +4422,16 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") - # Match FOV scout_to_standard - match_fov_scout_to_standard = pe.MapNode( - interface=fsl.FLIRT(apply_xfm=True, interp="spline"), - name=f"match_fov_scout_to_standard_{pipe_num}", - iterfield=["in_file"], - ) - wf.connect( - identity_node, "identity_file", match_fov_scout_to_standard, "in_matrix_file" - ) - node, out = strat_pool.get_data("motion-basefile") - wf.connect(node, out, match_fov_scout_to_standard, "in_file") - - node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") - wf.connect(node, out, match_fov_scout_to_standard, "reference") - # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz 
-r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} applywarp_scout = pe.Node( interface=fsl.ApplyWarp(), name=f"applywarp_scout_input_{pipe_num}" ) + applywarp_scout.inputs.relwarp = True applywarp_scout.inputs.interp = "spline" - wf.connect(match_fov_scout_to_standard, "out_file", applywarp_scout, "in_file") + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, applywarp_scout, "in_file") node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") wf.connect( @@ -5611,38 +5600,3 @@ def _warp_return( if apply_xfm is None: return wf, {} return wf, outputs - - -def create_identity_matrix(workdir): - """Create an identity matrix for FLIRT.""" - identity_file = Path(workdir) / "identity.mat" - np.savetxt(identity_file, np.eye(4), fmt="%.6f") - return str(identity_file) - - -def build_match_fov_wf(name, workdir, pipe_num): - """ - Workflow to match Field-of-View for T1w or functional images - using an identity matrix. - """ - wf = Workflow(name=name) - - identity_node = Node( - Function( - input_names=["workdir"], - output_names=["identity_file"], - function=create_identity_matrix, - ), - name=f"create_identity_matrix_{pipe_num}", - ) - identity_node.inputs.workdir = workdir - - match_fov_node = MapNode( - interface=fsl.FLIRT(apply_xfm=True, interp="spline"), - name=f"match_fov_{pipe_num}", - iterfield=["in_file"], - ) - - wf.connect(identity_node, "identity_file", match_fov_node, "in_matrix_file") - - return wf, match_fov_node diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 844def45a0..ee961a7ffe 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -217,14 +217,15 @@ registration_workflows: # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: Off + # options: 'whole-head' or 'brain' + reference: whole-head + # Choose coregistration interpolation interpolation: spline # Choose coregistration degree of freedom dof: 12 - - func_registration_to_template: # these options modify the application (to the functional data), not the calculation, of the diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index fd1686a21f..008090280b 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -738,16 +738,15 @@ registration_workflows: # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: On + # options: 'whole-head' or 'brain' + reference: whole-head + boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] - # reference for boundary based registration - # options: 'whole-head' or 'brain' - reference: whole-head - # choose which FAST map to generate BBR WM mask # options: 'probability_map', 'partial_volume_map' bbr_wm_map: probability_map @@ -759,9 +758,6 @@ registration_workflows: # It is not necessary to change this path unless you intend to use non-standard MNI registration. 
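       # e.g., to point BBR at a custom schedule file instead (illustrative path):
       # bbr_schedule: /opt/fsl/etc/flirtsch/bbr.sch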
bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch - # Choose FSL or ABCD as coregistration method - using: FSL - # Choose coregistration interpolation interpolation: trilinear diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index cba22d6a08..c69cc8d4b0 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -775,9 +775,6 @@ registration_workflows: run: On - # Choose FSL or ABCD as coregistration method - using: FSL - # Choose coregistration interpolation interpolation: trilinear @@ -811,6 +808,9 @@ registration_workflows: # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: On + # options: 'whole-head' or 'brain' + reference: whole-head + boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline @@ -820,10 +820,6 @@ registration_workflows: # It is not necessary to change this path unless you intend to use non-standard MNI registration. bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch - # reference for boundary based registration - # options: 'whole-head' or 'brain' - reference: whole-head - # choose which FAST map to generate BBR WM mask # options: 'probability_map', 'partial_volume_map' bbr_wm_map: 'probability_map' diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 5453144af5..2b268930a7 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -251,16 +251,15 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [fmriprep_reference] + # options: 'whole-head' or 'brain' + reference: brain + boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [On] - # reference for boundary based registration - # options: 'whole-head' or 'brain' - reference: brain - # choose which FAST map to generate BBR WM mask # options: 'probability_map', 'partial_volume_map' bbr_wm_map: partial_volume_map From e9ba975098b09382ed976f2964caca4dab7245f2 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 27 Aug 2025 16:25:52 -0400 Subject: [PATCH 472/507] adding ref_weight in FLIRT whole head registration --- CPAC/registration/registration.py | 33 ++++++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a8da814e97..c3d63f43c0 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -711,6 +711,12 @@ def create_register_func_to_anat( inputspec.interp : string Type of interpolation to use ('trilinear' or 'nearestneighbour' or 'sinc') + inputspec.ref_weight : string (nifti file) + Reference weight image for registration + inputspec.fieldmap : string (nifti file) + Field map image for registration + inputspec.fieldmapmask : string (nifti file) + Field map mask image for registration Workflow Outputs:: @@ -723,7 +729,15 @@ def create_register_func_to_anat( inputspec = pe.Node( util.IdentityInterface( - fields=["func", "anat", "dof", "interp", "fieldmap", "fieldmapmask"] + fields=[ + "func", + "anat", + "dof", + "interp", + "fieldmap", + "fieldmapmask", + "ref_weight", + ] ), name="inputspec", ) @@ -754,6 +768,15 @@ def 
create_register_func_to_anat( linear_reg.inputs.dof = config.registration_workflows["functional_registration"][ "coregistration" ]["dof"] + + if ( + config.registration_workflows["functional_registration"]["coregistration"][ + "reference" + ] + == "whole_head" + ): + register_func_to_anat.connect(inputspec, "ref_weight", linear_reg, "ref_weight") + if ( config.registration_workflows["functional_registration"]["coregistration"][ "arguments" @@ -3349,6 +3372,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ), ( "desc-preproc_T1w", + "space-T1w_desc-brain_mask", ["desc-restore-brain_T1w", "desc-preproc_T1w"], ["desc-restore_T1w", "desc-head_T1w"], "desc-preproc_T2w", @@ -3428,12 +3452,15 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): == "whole-head" ): node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w"]) + wf.connect(node, out, func_to_anat, "inputspec.anat") + + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") + wf.connect(node, out, func_to_anat, "inputspec.ref_weight") else: node, out = strat_pool.get_data( ["desc-restore-brain_T1w", "desc-preproc_T1w"] ) - - wf.connect(node, out, func_to_anat, "inputspec.anat") + wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: node, out = strat_pool.get_data("effectiveEchoSpacing") From 4e47f2dbb588b487a2bf5116aca6df0f68eda0ac Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 28 Aug 2025 13:14:33 -0400 Subject: [PATCH 473/507] adding reference whole head --- CPAC/registration/registration.py | 2 +- .../configs/pipeline_config_abcd-options.yml | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index c3d63f43c0..65a6f00a8a 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -773,7 +773,7 @@ def create_register_func_to_anat( config.registration_workflows["functional_registration"]["coregistration"][ "reference" ] - == "whole_head" + == "whole-head" ): register_func_to_anat.connect(inputspec, "ref_weight", linear_reg, "ref_weight") diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index ee961a7ffe..de7d04a479 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -225,6 +225,11 @@ registration_workflows: # Choose coregistration degree of freedom dof: 12 + boundary_based_registration: + + # this is a fork point + # run: [On, Off] - this will run both and fork the pipeline + run: [On] func_registration_to_template: @@ -263,7 +268,13 @@ registration_workflows: # Interpolation method for writing out transformed functional images. 
# Possible values: Linear, BSpline, LanczosWindowedSinc interpolation: Linear + EPI_registration: + # directly register the mean functional to an EPI template + # instead of applying the anatomical T1-to-template transform to the functional data that has been + # coregistered to anatomical/T1 space + run: on + functional_preproc: run: On motion_estimates_and_correction: From 1a4c8d92b184900e3d54a908dc4fe23b13ea956f Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 29 Aug 2025 14:29:43 -0400 Subject: [PATCH 474/507] :recycle: Restore consecutive forking ability --- CPAC/pipeline/cpac_pipeline.py | 1 - CPAC/pipeline/engine.py | 14 ++++++++++---- CPAC/pipeline/utils.py | 7 +++++-- CPAC/registration/registration.py | 3 +-- 4 files changed, 16 insertions(+), 9 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 8ca5b1398c..f0d6da602b 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1392,7 +1392,6 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): target_space_nuis = cfg.nuisance_corrections["2-nuisance_regression"]["space"] target_space_alff = cfg.amplitude_low_frequency_fluctuation["target_space"] target_space_reho = cfg.regional_homogeneity["target_space"] - if apply_func_warp["T1"]: ts_to_T1template_block = [ apply_phasediff_to_timeseries_separately, diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 0f49941eca..d6ec47abf4 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1481,7 +1481,14 @@ def node_data(self, resource, **kwargs): return NodeData(self, resource, **kwargs) @staticmethod - def _normalize_variant_dict(json_obj: dict) -> dict[str, Optional[str]]: + def _concatenate_list_items(list_values: list[str] | str) -> str: + """Sort and concatenate list values.""" + if isinstance(list_values, list): + list_values.sort() + list_values = "-".join(list_values) + return list_values + + def _normalize_variant_dict(self, json_obj: dict) -> dict[str, Optional[str]]: """ Return {variant_key: primary_value or None}. @@ -1489,9 +1496,8 @@ def _normalize_variant_dict(json_obj: dict) -> dict[str, Optional[str]]: - "NO-..." 
entries normalize to None """ out = {} - for k, v in json_obj.get("CpacVariant", {}).items(): - assert isinstance(v, (list, str)) - primary = "-".join(v) if isinstance(v, list) else v + for k, _v in json_obj.get("CpacVariant", {}).items(): + primary = self._concatenate_list_items(_v) out[k] = ( None if (isinstance(primary, str) and primary.startswith("NO-")) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 97852d2a8e..de412eee43 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -249,7 +249,7 @@ def _update_resource_idx(resource_idx, out_dct, key, value): def find_variants( pool: "ResourcePool", keys: list | str | tuple -) -> dict[str, dict[str, set[str]]]: +) -> dict[str, dict[str, list[str] | set[str] | str]]: """Find variants in the ResourcePool for the given keys.""" outputs = {} if isinstance(keys, str): @@ -276,6 +276,9 @@ def short_circuit_crossed_variants( variant_dicts = list(_variants.values()) if not variant_dicts: return + for dct in variant_dicts: + for k, v in dct.items(): + dct[k] = [pool._concatenate_list_items(v)] # only keep keys that exist in all variant dicts common_keys = set.intersection(*(set(v.keys()) for v in variant_dicts)) @@ -285,7 +288,7 @@ def short_circuit_crossed_variants( values = set() for variant in variant_dicts: values.update(variant.get(key, [])) - if len(values) > 1: + if any(len(set(variant.get(key, []))) > 1 for variant in variant_dicts): crossed_variants[key] = values if crossed_variants: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 41b42aae6e..278dbb9990 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4057,8 +4057,7 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { "space-template_desc-preproc_bold": (apply_xfm, "outputspec.output_image") } - - return (wf, outputs) + return wf, outputs @nodeblock( From 25453fcf656fa14696002da1c3adaf494d3388e1 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 29 Aug 2025 18:56:51 -0400 Subject: [PATCH 475/507] concating match_fov to the xfm --- CPAC/registration/registration.py | 145 ++++++------------------------ 1 file changed, 28 insertions(+), 117 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 65a6f00a8a..a39d4b3a3f 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3095,6 +3095,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None ) match_fovs_T1w.inputs.apply_xfm = True match_fovs_T1w.inputs.uses_qform = True + match_fovs_T1w.inputs.out_matrix_file = "match_fov.mat" # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} fsl_apply_warp_t1_to_template = pe.Node( @@ -3115,11 +3116,25 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - match_fovs_T1w_brain = pe.Node( - interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_{pipe_num}" + concat_match_fov = pe.Node(interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}") + concat_match_fov.inputs.relwarp = True + + wf.connect(match_fovs_T1w, "out_matrix_file", concat_match_fov, "premat") + wf.connect(merge_xfms, "merged_file", concat_match_fov, "warp1") + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, concat_match_fov, "reference") + + # Node to 
concatenate the inverse warp with the FOV matrix + concat_match_fov_inv = pe.Node( + interface=fsl.ConvertWarp(), + name=f"concat_match_fov_inv_{pipe_num}" ) - match_fovs_T1w_brain.inputs.apply_xfm = True - match_fovs_T1w_brain.inputs.uses_qform = True + concat_match_fov_inv.inputs.relwarp = True + + wf.connect(merge_inv_xfms, "merged_file", concat_match_fov_inv, "warp1") + wf.connect(match_fovs_T1w, "out_matrix_file", concat_match_fov_inv, "premat") + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w"]) + wf.connect(node, out, concat_match_fov_inv, "reference") # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} fsl_apply_warp_t1_brain_to_template = pe.Node( @@ -3130,28 +3145,17 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) - wf.connect(node, out, match_fovs_T1w_brain, "in_file") wf.connect( - match_fovs_T1w_brain, - "out_file", - fsl_apply_warp_t1_brain_to_template, - "in_file", + node, out, fsl_apply_warp_t1_brain_to_template, "in_file" ) node, out = strat_pool.get_data("T1w-brain-template") - wf.connect(node, out, match_fovs_T1w_brain, "reference") wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") wf.connect( - merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" + concat_match_fov, "out_file", fsl_apply_warp_t1_brain_to_template, "field_file" ) - match_fovs_T1w_brain_mask = pe.Node( - interface=fsl.FLIRT(), name=f"match_fovs_T1w_brain_mask_{pipe_num}" - ) - match_fovs_T1w_brain_mask.inputs.apply_xfm = True - match_fovs_T1w_brain_mask.inputs.uses_qform = True - fsl_apply_warp_t1_brain_mask_to_template = pe.Node( interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_brain_mask_to_template_{pipe_num}", @@ -3160,21 +3164,16 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = "nn" node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, match_fovs_T1w_brain_mask, "in_file") wf.connect( - match_fovs_T1w_brain_mask, - "out_file", - fsl_apply_warp_t1_brain_mask_to_template, - "in_file", + node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file" ) node, out = strat_pool.get_data("T1w-brain-template-mask") - wf.connect(node, out, match_fovs_T1w_brain_mask, "reference") wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") wf.connect( - merge_xfms, - "merged_file", + concat_match_fov, + "out_file", fsl_apply_warp_t1_brain_mask_to_template, "field_file", ) @@ -3193,8 +3192,8 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None outputs = { "space-template_desc-preproc_T1w": (apply_mask, "out_file"), "space-template_desc-head_T1w": (fsl_apply_warp_t1_to_template, "out_file"), - "from-T1w_to-template_mode-image_xfm": (merge_xfms, "merged_file"), - "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"), + "from-T1w_to-template_mode-image_xfm": (concat_match_fov, "out_file"), + "from-template_to-T1w_mode-image_xfm": (concat_match_fov_inv, "out_file"), } else: @@ -4385,34 +4384,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): "ref_file", ) - # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz 
--warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz - applywarp_func_mask_to_standard = pe.MapNode( - interface=fsl.ApplyWarp(), - name=f"applywarp_func_mask_to_standard_{pipe_num}", - iterfield=["in_file", "field_file"], - ) - - applywarp_func_mask_to_standard.inputs.relwarp = True - applywarp_func_mask_to_standard.inputs.interp = "nn" - - node, out = strat_pool.get_data("space-template_desc-brain_mask") - wf.connect(node, out, applywarp_func_mask_to_standard, "in_file") - - wf.connect( - convert_registration_warp, - "out_file", - applywarp_func_mask_to_standard, - "field_file", - ) - - node, out = strat_pool.get_data("space-template_res-bold_desc-head_T1w") - wf.connect( - node, - out, - applywarp_func_mask_to_standard, - "ref_file", - ) - ### Loop ends! ### # fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol @@ -4426,29 +4397,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): applywarp_func_to_standard, "out_file", merge_func_to_standard, "in_files" ) - # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol - merge_func_mask_to_standard = pe.Node( - interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}" - ) - - merge_func_mask_to_standard.inputs.dimension = "t" - - wf.connect( - applywarp_func_mask_to_standard, - "out_file", - merge_func_mask_to_standard, - "in_files", - ) - - # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask - find_min_mask = pe.Node( - interface=fsl.maths.MathsCommand(), name=f"find_min_mask_{pipe_num}" - ) - - find_min_mask.inputs.args = "-Tmin" - - wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") - # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} applywarp_scout = pe.Node( interface=fsl.ApplyWarp(), name=f"applywarp_scout_input_{pipe_num}" @@ -4471,46 +4419,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # warp field is just fMRI->standard (skip GDC) wf.connect(convert_func_to_standard_warp, "out_file", applywarp_scout, "field_file") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/1214767/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 - # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float - merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") - - node, out = strat_pool.get_data("space-template_desc-bold_mask") - wf.connect( - node, - out, - merge_func_mask, - "in1", - ) - - wf.connect(find_min_mask, "out_file", merge_func_mask, "in2") - - extract_func_brain = pe.Node( - interface=fsl.MultiImageMaths(), name=f"extract_func_brain_{pipe_num}" - ) - - extract_func_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" - extract_func_brain.inputs.output_datatype = "float" - - wf.connect(merge_func_to_standard, "merged_file", extract_func_brain, "in_file") - - wf.connect(merge_func_mask, "out", extract_func_brain, "operand_files") - - # fslmaths ${ScoutInput} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${ScoutOutput} -odt float - extract_scout_brain = pe.Node( - interface=fsl.MultiImageMaths(), name=f"extract_scout_brain_{pipe_num}" - ) - - extract_scout_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" - extract_scout_brain.inputs.output_datatype = "float" - - wf.connect(applywarp_scout, "out_file", 
extract_scout_brain, "in_file") - - wf.connect(merge_func_mask, "out", extract_scout_brain, "operand_files") - outputs = { - "space-template_desc-preproc_bold": (extract_func_brain, "out_file"), - "space-template_desc-scout_bold": (extract_scout_brain, "out_file"), + "space-template_desc-preproc_bold": (merge_func_to_standard, "merged_file"), + "space-template_desc-scout_bold": (applywarp_scout, "out_file"), "space-template_desc-head_bold": (merge_func_to_standard, "merged_file"), } From 47cdb13942f0c8efb6bc8ad6ed413461b809e272 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 2 Sep 2025 13:12:09 -0400 Subject: [PATCH 476/507] Turning EPI off for abcd-options config --- CHANGELOG.md | 1 + CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cac9fc998c..ceb319991b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed broken support for native-space masking. - Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods. - Moved `Anatomical_Resampled` masking method from `func_masking` to the `template_space_func_masking`. +- Moved `reference` in the bbreg config out to the whole co-registration block in all configs. ### Upgraded diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index de7d04a479..7339413237 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -273,8 +273,8 @@ registration_workflows: # directly register the mean functional to an EPI template # instead of applying the anatomical T1-to-template transform to the functional data that has been # coregistered to anatomical/T1 space - run: on - + run: off + functional_preproc: run: On motion_estimates_and_correction: From cc5648edad1f621d4f254aac74f017dced917354 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 2 Sep 2025 13:28:36 -0400 Subject: [PATCH 477/507] :rewind: Revert ":recycle: Restore consecutive forking ability" This reverts commit 1a4c8d92b184900e3d54a908dc4fe23b13ea956f. 
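
For reference, a minimal sketch of the normalization behavior this revert
restores (illustrative only; the real method is the staticmethod on the
resource pool in CPAC/pipeline/engine.py, and the example variant names
below are hypothetical):

    def _normalize_variant_dict(json_obj):
        """Join list values with "-"; normalize "NO-..." entries to None."""
        out = {}
        for k, v in json_obj.get("CpacVariant", {}).items():
            assert isinstance(v, (list, str))
            primary = "-".join(v) if isinstance(v, list) else v
            out[k] = (
                None
                if (isinstance(primary, str) and primary.startswith("NO-"))
                else primary
            )
        return out

    # _normalize_variant_dict({"CpacVariant": {"bold_mask": "NO-mask",
    #                                          "regressors": ["csf", "wm"]}})
    # -> {"bold_mask": None, "regressors": "csf-wm"}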
--- CPAC/pipeline/cpac_pipeline.py | 1 + CPAC/pipeline/engine.py | 14 ++++---------- CPAC/pipeline/utils.py | 7 ++----- CPAC/registration/registration.py | 3 ++- 4 files changed, 9 insertions(+), 16 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index f0d6da602b..8ca5b1398c 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1392,6 +1392,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): target_space_nuis = cfg.nuisance_corrections["2-nuisance_regression"]["space"] target_space_alff = cfg.amplitude_low_frequency_fluctuation["target_space"] target_space_reho = cfg.regional_homogeneity["target_space"] + if apply_func_warp["T1"]: ts_to_T1template_block = [ apply_phasediff_to_timeseries_separately, diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index d6ec47abf4..0f49941eca 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1481,14 +1481,7 @@ def node_data(self, resource, **kwargs): return NodeData(self, resource, **kwargs) @staticmethod - def _concatenate_list_items(list_values: list[str] | str) -> str: - """Sort and concatenate list values.""" - if isinstance(list_values, list): - list_values.sort() - list_values = "-".join(list_values) - return list_values - - def _normalize_variant_dict(self, json_obj: dict) -> dict[str, Optional[str]]: + def _normalize_variant_dict(json_obj: dict) -> dict[str, Optional[str]]: """ Return {variant_key: primary_value or None}. @@ -1496,8 +1489,9 @@ def _normalize_variant_dict(self, json_obj: dict) -> dict[str, Optional[str]]: - "NO-..." entries normalize to None """ out = {} - for k, _v in json_obj.get("CpacVariant", {}).items(): - primary = self._concatenate_list_items(_v) + for k, v in json_obj.get("CpacVariant", {}).items(): + assert isinstance(v, (list, str)) + primary = "-".join(v) if isinstance(v, list) else v out[k] = ( None if (isinstance(primary, str) and primary.startswith("NO-")) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index de412eee43..97852d2a8e 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -249,7 +249,7 @@ def _update_resource_idx(resource_idx, out_dct, key, value): def find_variants( pool: "ResourcePool", keys: list | str | tuple -) -> dict[str, dict[str, list[str] | set[str] | str]]: +) -> dict[str, dict[str, set[str]]]: """Find variants in the ResourcePool for the given keys.""" outputs = {} if isinstance(keys, str): @@ -276,9 +276,6 @@ def short_circuit_crossed_variants( variant_dicts = list(_variants.values()) if not variant_dicts: return - for dct in variant_dicts: - for k, v in dct.items(): - dct[k] = [pool._concatenate_list_items(v)] # only keep keys that exist in all variant dicts common_keys = set.intersection(*(set(v.keys()) for v in variant_dicts)) @@ -288,7 +285,7 @@ def short_circuit_crossed_variants( values = set() for variant in variant_dicts: values.update(variant.get(key, [])) - if any(len(set(variant.get(key, []))) > 1 for variant in variant_dicts): + if len(values) > 1: crossed_variants[key] = values if crossed_variants: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 278dbb9990..41b42aae6e 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4057,7 +4057,8 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { "space-template_desc-preproc_bold": (apply_xfm, "outputspec.output_image") } - return wf, outputs + + return (wf, outputs) 
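# A note on the contract restored above (a sketch; names are taken from
# this module): every nodeblock returns the workflow plus an `outputs`
# dict mapping resource keys to (node, output_name) pairs, which the
# engine uses to populate the resource pool, e.g.:
#     outputs = {"space-template_desc-preproc_bold":
#                (apply_xfm, "outputspec.output_image")}
#     return (wf, outputs)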
@nodeblock( From 66d40dc7cfecca282fa72a70af07a654ba7cff28 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 2 Sep 2025 13:30:23 -0400 Subject: [PATCH 478/507] adding to changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ceb319991b..7a97e63455 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods. - Moved `Anatomical_Resampled` masking method from `func_masking` to the `template_space_func_masking`. - Moved `reference` in the bbreg config out to the whole co-registration block in all configs. +- Turned `On` boundary_based_registration for abcd-options preconfig. +- Refactored `transform_timeseries_to_T1template_abcd` nodeblock removing unnecessary nodes, changing `desc-preproc_T1w` inputs as reference to `desc-head_T1w`. +- Appended `T1w to Template` FOV match transform to the XFM. ### Upgraded From 9c551be1f9199ba2187584b17c925ef4d5264411 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 2 Sep 2025 13:48:52 -0400 Subject: [PATCH 479/507] :necktie: Stringify variants to handle chains --- CPAC/pipeline/utils.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py index 97852d2a8e..5dad655bb7 100644 --- a/CPAC/pipeline/utils.py +++ b/CPAC/pipeline/utils.py @@ -254,7 +254,12 @@ def find_variants( outputs = {} if isinstance(keys, str): try: - return {keys: pool.get_json(keys)["CpacVariant"]} + return { + keys: { + _k: {str(_v)} + for _k, _v in pool.get_json(keys)["CpacVariant"].items() + } + } except LookupError: return {} for key in keys: From f2a75786d9c047c20e0f60bfb47849e8ea833b88 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Sep 2025 18:25:49 +0000 Subject: [PATCH 480/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/registration/registration.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index b4e4e85ab5..1a2385b646 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3116,7 +3116,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - concat_match_fov = pe.Node(interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}") + concat_match_fov = pe.Node( + interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}" + ) concat_match_fov.inputs.relwarp = True wf.connect(match_fovs_T1w, "out_matrix_file", concat_match_fov, "premat") @@ -3126,8 +3128,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # Node to concatenate the inverse warp with the FOV matrix concat_match_fov_inv = pe.Node( - interface=fsl.ConvertWarp(), - name=f"concat_match_fov_inv_{pipe_num}" + interface=fsl.ConvertWarp(), name=f"concat_match_fov_inv_{pipe_num}" ) concat_match_fov_inv.inputs.relwarp = True @@ -3146,15 +3147,16 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", 
"desc-preproc_T1w"]) - wf.connect( - node, out, fsl_apply_warp_t1_brain_to_template, "in_file" - ) + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") wf.connect( - concat_match_fov, "out_file", fsl_apply_warp_t1_brain_to_template, "field_file" + concat_match_fov, + "out_file", + fsl_apply_warp_t1_brain_to_template, + "field_file", ) fsl_apply_warp_t1_brain_mask_to_template = pe.Node( @@ -3166,9 +3168,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect( - node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file" - ) + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") node, out = strat_pool.get_data("T1w-brain-template-mask") wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") From e9925b7d51d4c2f967b8c8f4272f3d5b10f8a5b1 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 3 Sep 2025 16:58:32 -0400 Subject: [PATCH 481/507] changing func_input from Selected_Functional_Volume to Mean_Functional in abcd-options preconfig --- CPAC/registration/registration.py | 6 +++++- CPAC/resources/configs/pipeline_config_abcd-options.yml | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1a2385b646..696f9ad83c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4039,7 +4039,11 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 mask to template.""" - reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") + + if cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] and cfg.registration_workflows["anatomical_registration"]["overwrite_transform"]["using"] == 'FSL': + reg_tool = 'fsl' + else: + reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 7339413237..111f532328 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -212,7 +212,7 @@ registration_workflows: # Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose. 
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] - input: [Selected_Functional_Volume] + input: [Mean_Functional] # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: Off From 009ba4a822077800a699162a26706d4aa84b5182 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 3 Sep 2025 16:58:41 -0400 Subject: [PATCH 482/507] changing func_input from Selected_Functional_Volume to Mean_Functional in abcd-options preconfig --- CPAC/registration/registration.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 696f9ad83c..f5af239793 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -4039,9 +4039,14 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 mask to template.""" - - if cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] and cfg.registration_workflows["anatomical_registration"]["overwrite_transform"]["using"] == 'FSL': - reg_tool = 'fsl' + if ( + cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] + and cfg.registration_workflows["anatomical_registration"][ + "overwrite_transform" + ]["using"] + == "FSL" + ): + reg_tool = "fsl" else: reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") From 2b13ca56b6fec6f007687482518f1345dcaf61c9 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 3 Sep 2025 20:58:17 -0400 Subject: [PATCH 483/507] reverting some unnecessary changes --- CPAC/pipeline/schema.py | 2 +- CPAC/registration/registration.py | 67 ++++++------------- .../configs/pipeline_config_abcd-options.yml | 9 +-- .../configs/pipeline_config_blank.yml | 7 +- .../configs/pipeline_config_default.yml | 7 +- .../pipeline_config_fmriprep-options.yml | 7 +- 6 files changed, 39 insertions(+), 60 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index abfb7a3c4e..81c542e8e5 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -843,9 +843,9 @@ def sanitize(filename): }, "mask_sbref": bool1_1, }, - "reference": In({"whole-head", "brain"}), "boundary_based_registration": { "run": forkable, + "reference": In({"whole-head", "brain"}), "bbr_schedule": str, "bbr_wm_map": In({"probability_map", "partial_volume_map"}), "bbr_wm_mask_args": str, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index f5af239793..2865c15b72 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -769,14 +769,6 @@ def create_register_func_to_anat( "coregistration" ]["dof"] - if ( - config.registration_workflows["functional_registration"]["coregistration"][ - "reference" - ] - == "whole-head" - ): - register_func_to_anat.connect(inputspec, "ref_weight", linear_reg, "ref_weight") - if ( config.registration_workflows["functional_registration"]["coregistration"][ "arguments" @@ -3116,9 +3108,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - concat_match_fov = pe.Node( - interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}" - ) + concat_match_fov = pe.Node(interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}") concat_match_fov.inputs.relwarp = 
True wf.connect(match_fovs_T1w, "out_matrix_file", concat_match_fov, "premat") @@ -3128,7 +3118,8 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # Node to concatenate the inverse warp with the FOV matrix concat_match_fov_inv = pe.Node( - interface=fsl.ConvertWarp(), name=f"concat_match_fov_inv_{pipe_num}" + interface=fsl.ConvertWarp(), + name=f"concat_match_fov_inv_{pipe_num}" ) concat_match_fov_inv.inputs.relwarp = True @@ -3147,16 +3138,15 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") + wf.connect( + node, out, fsl_apply_warp_t1_brain_to_template, "in_file" + ) node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") wf.connect( - concat_match_fov, - "out_file", - fsl_apply_warp_t1_brain_to_template, - "field_file", + concat_match_fov, "out_file", fsl_apply_warp_t1_brain_to_template, "field_file" ) fsl_apply_warp_t1_brain_mask_to_template = pe.Node( @@ -3168,7 +3158,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") + wf.connect( + node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file" + ) node, out = strat_pool.get_data("T1w-brain-template-mask") wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") @@ -3446,22 +3438,10 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("sbref") wf.connect(node, out, func_to_anat, "inputspec.func") - if ( - cfg.registration_workflows["functional_registration"]["coregistration"][ - "reference" - ] - == "whole-head" - ): - node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w"]) - wf.connect(node, out, func_to_anat, "inputspec.anat") - - node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, func_to_anat, "inputspec.ref_weight") - else: - node, out = strat_pool.get_data( - ["desc-restore-brain_T1w", "desc-preproc_T1w"] - ) - wf.connect(node, out, func_to_anat, "inputspec.anat") + node, out = strat_pool.get_data( + ["desc-restore-brain_T1w", "desc-preproc_T1w"] + ) + wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: node, out = strat_pool.get_data("effectiveEchoSpacing") @@ -3521,8 +3501,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): if ( cfg.registration_workflows["functional_registration"]["coregistration"][ - "reference" - ] + "boundary_based_registration" + ]["reference"] == "whole-head" ): node, out = strat_pool.get_data("desc-head_T1w") @@ -3530,8 +3510,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): elif ( cfg.registration_workflows["functional_registration"]["coregistration"][ - "reference" - ] + "boundary_based_registration" + ]["reference"] == "brain" ): node, out = strat_pool.get_data("desc-preproc_T1w") @@ -4039,14 +4019,9 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 mask to template.""" - if ( - cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] - and cfg.registration_workflows["anatomical_registration"][ - 
"overwrite_transform" - ]["using"] - == "FSL" - ): - reg_tool = "fsl" + + if cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] and cfg.registration_workflows["anatomical_registration"]["overwrite_transform"]["using"] == 'FSL': + reg_tool = 'fsl' else: reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 111f532328..fa682d8c7a 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -212,14 +212,11 @@ registration_workflows: # Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose. # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] - input: [Mean_Functional] + input: [Selected_Functional_Volume] # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: Off - # options: 'whole-head' or 'brain' - reference: whole-head - # Choose coregistration interpolation interpolation: spline @@ -231,6 +228,10 @@ registration_workflows: # run: [On, Off] - this will run both and fork the pipeline run: [On] + # reference for boundary based registration + # options: 'whole-head' or 'brain' + reference: whole-head + func_registration_to_template: # these options modify the application (to the functional data), not the calculation, of the diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 008090280b..11e4809460 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -738,15 +738,16 @@ registration_workflows: # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: On - # options: 'whole-head' or 'brain' - reference: whole-head - boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [Off] + # reference for boundary based registration + # options: 'whole-head' or 'brain' + reference: whole-head + # choose which FAST map to generate BBR WM mask # options: 'probability_map', 'partial_volume_map' bbr_wm_map: probability_map diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index c69cc8d4b0..7bdbf527cc 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -808,14 +808,15 @@ registration_workflows: # Mask the sbref created by coregistration input prep nodeblocks above before registration mask_sbref: On - # options: 'whole-head' or 'brain' - reference: whole-head - boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [On] + # reference for boundary based registration + # options: 'whole-head' or 'brain' + reference: whole-head + # Standard FSL 5.0 Scheduler used for Boundary Based Registration. # It is not necessary to change this path unless you intend to use non-standard MNI registration. 
bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 2b268930a7..5453144af5 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -251,15 +251,16 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [fmriprep_reference] - # options: 'whole-head' or 'brain' - reference: brain - boundary_based_registration: # this is a fork point # run: [On, Off] - this will run both and fork the pipeline run: [On] + # reference for boundary based registration + # options: 'whole-head' or 'brain' + reference: brain + # choose which FAST map to generate BBR WM mask # options: 'probability_map', 'partial_volume_map' bbr_wm_map: partial_volume_map From c3957a969d82fc3baddd6aff17fb1a431b3cfabb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Sep 2025 00:58:57 +0000 Subject: [PATCH 484/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/registration/registration.py | 34 +++++++++++++++++-------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 2865c15b72..6bf649b440 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3108,7 +3108,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" ) - concat_match_fov = pe.Node(interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}") + concat_match_fov = pe.Node( + interface=fsl.ConvertWarp(), name=f"concat_match_fov_{pipe_num}" + ) concat_match_fov.inputs.relwarp = True wf.connect(match_fovs_T1w, "out_matrix_file", concat_match_fov, "premat") @@ -3118,8 +3120,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # Node to concatenate the inverse warp with the FOV matrix concat_match_fov_inv = pe.Node( - interface=fsl.ConvertWarp(), - name=f"concat_match_fov_inv_{pipe_num}" + interface=fsl.ConvertWarp(), name=f"concat_match_fov_inv_{pipe_num}" ) concat_match_fov_inv.inputs.relwarp = True @@ -3138,15 +3139,16 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) - wf.connect( - node, out, fsl_apply_warp_t1_brain_to_template, "in_file" - ) + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") wf.connect( - concat_match_fov, "out_file", fsl_apply_warp_t1_brain_to_template, "field_file" + concat_match_fov, + "out_file", + fsl_apply_warp_t1_brain_to_template, + "field_file", ) fsl_apply_warp_t1_brain_mask_to_template = pe.Node( @@ -3158,9 +3160,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect( - node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file" - ) + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") node, out = 
strat_pool.get_data("T1w-brain-template-mask") wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") @@ -3438,9 +3438,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("sbref") wf.connect(node, out, func_to_anat, "inputspec.func") - node, out = strat_pool.get_data( - ["desc-restore-brain_T1w", "desc-preproc_T1w"] - ) + node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: @@ -4020,8 +4018,14 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): """Warp T1 mask to template.""" - if cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] and cfg.registration_workflows["anatomical_registration"]["overwrite_transform"]["using"] == 'FSL': - reg_tool = 'fsl' + if ( + cfg.registration_workflows["anatomical_registration"]["overwrite_transform"] + and cfg.registration_workflows["anatomical_registration"][ + "overwrite_transform" + ]["using"] + == "FSL" + ): + reg_tool = "fsl" else: reg_tool = strat_pool.reg_tool("from-T1w_to-template_mode-image_xfm") From 54994b09287a2c2f4779bfe02d433a892a2c8348 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Wed, 3 Sep 2025 21:06:51 -0400 Subject: [PATCH 485/507] updating the changelog --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a97e63455..4de4717bf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,7 +57,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed broken support for native-space masking. - Introduced a new `template_space_func_masking` section in the pipeline config for template-space-only methods. - Moved `Anatomical_Resampled` masking method from `func_masking` to the `template_space_func_masking`. -- Moved `reference` in the bbreg config out to the whole co-registration block in all configs. - Turned `On` boundary_based_registration for abcd-options preconfig. - Refactored `transform_timeseries_to_T1template_abcd` nodeblock removing unnecessary nodes, changing `desc-preproc_T1w` inputs as reference to `desc-head_T1w`. - Appended `T1w to Template` FOV match transform to the XFM. From b15aece93831f3c49a54d2c3ca2df643857bfce2 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 4 Sep 2025 14:29:50 -0400 Subject: [PATCH 486/507] :passport_control: Document maintenance mode and codeowners Co-authored-by: Biraj Shrestha <111654544+birajstha@users.noreply.github.com> Co-authored-by: Steve Giavasis --- .dockerignore | 1 + .github/CODEOWNERS | 30 ++++++++++++++++++++++++++++++ README.md | 4 ++++ SUPPORT.md | 18 ++++++++++++++++++ 4 files changed, 53 insertions(+) create mode 100644 .github/CODEOWNERS create mode 100644 SUPPORT.md diff --git a/.dockerignore b/.dockerignore index 4ea69480d5..ef5287d138 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,5 +4,6 @@ cpac_runs .env* .git .github +!.github/CODEOWNERS !.github/scripts *.tar.gz diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..81b0a84a6e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,30 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . + +# Global maintenance +* @FCP-INDI/Maintenance + +# DevOps +/pyproject.toml @FCP-INDI/DevOps +/requirements.txt @FCP-INDI/DevOps +/setup.py @FCP-INDI/DevOps +/dev @FCP-INDI/DevOps +/scripts @FCP-INDI/DevOps +/.* @FCP-INDI/DevOps +/.circleci @FCP-INDI/DevOps +/.github @FCP-INDI/DevOps +**/*Dockerfile @FCP-INDI/DevOps diff --git a/README.md b/README.md index f8501ba049..6bc400be3e 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,7 @@ C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANT You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . --> C-PAC: Configurable Pipeline for the Analysis of Connectomes ============================================================ + [![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) [![codecov](https://codecov.io/github/FCP-INDI/C-PAC/graph/badge.svg?token=sWxXoDRf1M)](https://codecov.io/github/FCP-INDI/C-PAC) [![LGPL](https://www.gnu.org/graphics/lgplv3-88x31.png)](./COPYING.LESSER) @@ -17,6 +18,9 @@ A configurable, open-source, Nipype-based, automated processing pipeline for res Designed for use by both novice users and experts, C-PAC brings the power, flexibility and elegance of Nipype to users in a plug-and-play fashion; no programming required. +> [!WARNING] +> C-PAC entered maintenance mode in version 1.8.8. See [SUPPORT.md](./SUPPORT.md). + Website ------- diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 0000000000..dec7292214 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,18 @@ +Support Policy +============== + +As of v1.8.8, C-PAC is in maintenance mode. With the 2.0.0 release, we will begin strict adherence to Semantic Versioning. + +While in maintenance mode, we will continue to publish new releases but FCP-INDI will no longer be paying developers to add new features. + +Our team is happy to offer guidance and support where possible. For requests involving custom development, extended technical support, or integration efforts, we're open to exploring paid support arrangements. Please reach out if you'd like to discuss these options further. + +Community contributions will be reviewed and released when passing review. Responsibility for these reviews is defined in [.github/CODEOWNERS](./.github/CODEOWNERS). + +User support will continue at [Neurostars](https://neurostars.org/tag/cpac), though expect a slower response time as FCP-INDI will no longer be paying developers to perform user support. 
+ +Major bug fixes will continue to be addressed by [**@FCP-INDI/maintenance**](https://github.com/orgs/FCP-INDI/teams/maintenance). Minor bugs will be documented and left to the community to contribute fixes and workarounds. + +Security releases will continue to be published by [**@FCP-INDI/DevOps**](https://github.com/orgs/FCP-INDI/teams/DevOps). + +These guidelines will be in effect for 2 years, at which point they will be reevaluated and updated by [**@FCP-INDI/maintenance**](https://github.com/orgs/FCP-INDI/teams/maintenance). From e408c74c2dce7fd139031959a4ea71b2091c3256 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 5 Sep 2025 16:58:05 -0400 Subject: [PATCH 487/507] moving the addition of bold masking below and enabling previously blocked nodeblocks --- CPAC/pipeline/cpac_pipeline.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index b9e6865939..12ff690f43 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1291,13 +1291,6 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): mask_sbref, ] - # Template space functional mask - if cfg.functional_preproc["template_space_func_masking"]["run"]: - if not rpool.check_rpool("space-template_desc-bold_mask"): - pipeline_blocks += [ - bold_mask_anatomical_resampled, - ] - # Distortion/Susceptibility Correction distcor_blocks = [] if "fmap" in sub_dict: @@ -1320,6 +1313,14 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): pipeline_blocks += stack_motion_blocks(func_blocks, cfg, rpool) + + # Template space functional mask + if cfg.functional_preproc["template_space_func_masking"]["run"]: + if not rpool.check_rpool("space-template_desc-bold_mask"): + pipeline_blocks += [ + bold_mask_anatomical_resampled, + ] + # BOLD to T1 coregistration if cfg.registration_workflows["functional_registration"]["coregistration"][ "run" From fb7e86f0622f4b5fff7bd119e59195a2eaeae69e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 5 Sep 2025 20:58:51 +0000 Subject: [PATCH 488/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/cpac_pipeline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 12ff690f43..a54fa929b3 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1313,14 +1313,13 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): pipeline_blocks += stack_motion_blocks(func_blocks, cfg, rpool) - # Template space functional mask if cfg.functional_preproc["template_space_func_masking"]["run"]: if not rpool.check_rpool("space-template_desc-bold_mask"): pipeline_blocks += [ bold_mask_anatomical_resampled, ] - + # BOLD to T1 coregistration if cfg.registration_workflows["functional_registration"]["coregistration"][ "run" From 5a48533c95d6dbf6ee8b756f6f8a9ca11569f428 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 11 Sep 2025 16:44:02 -0400 Subject: [PATCH 489/507] adding match fov node to convert fs brain mask to native space --- CPAC/anat_preproc/anat_preproc.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index d66d5ad786..fdc5d15edd 100644 --- 
a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1323,7 +1323,27 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file") - return wf, {"space-T1w_desc-brain_mask": (binarize_combined_mask, "out_file")} + # CCS brain mask is in FS space, transfer it back to native T1 space + match_fov_ccs_brain_mask = pe.Node( + interface=fsl.FLIRT(), name=f"match_fov_CCS_brain_mask_{node_id}" + ) + match_fov_ccs_brain_mask.inputs.apply_xfm = True + match_fov_ccs_brain_mask.inputs.uses_qform = True + match_fov_ccs_brain_mask.inputs.interp = "nearestneighbour" + + node, out = strat_pool.get_data("pipeline-fs_raw-average") + convert_fs_T1_to_nifti = pe.Node( + Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"convert_fs_T1_to_nifti_for_ccs_{node_id}", + ) + wf.connect(node, out, convert_fs_T1_to_nifti, "in_file") + wf.connect(convert_fs_T1_to_nifti, "out_file", match_fov_ccs_brain_mask, "reference") + + wf.connect(binarize_combined_mask, "out_file", match_fov_ccs_brain_mask, "in_file") + + return wf, {"space-T1w_desc-brain_mask": (match_fov_ccs_brain_mask, "out_file")} def mask_T2(wf_name="mask_T2"): From 1df0639e85f62193c88a541deaed44c3d742f63d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 11 Sep 2025 20:44:36 +0000 Subject: [PATCH 490/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/anat_preproc/anat_preproc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index fdc5d15edd..2cf19ced7e 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1323,7 +1323,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file") - # CCS brain mask is in FS space, transfer it back to native T1 space + # CCS brain mask is in FS space, transfer it back to native T1 space match_fov_ccs_brain_mask = pe.Node( interface=fsl.FLIRT(), name=f"match_fov_CCS_brain_mask_{node_id}" ) @@ -1339,7 +1339,9 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): name=f"convert_fs_T1_to_nifti_for_ccs_{node_id}", ) wf.connect(node, out, convert_fs_T1_to_nifti, "in_file") - wf.connect(convert_fs_T1_to_nifti, "out_file", match_fov_ccs_brain_mask, "reference") + wf.connect( + convert_fs_T1_to_nifti, "out_file", match_fov_ccs_brain_mask, "reference" + ) wf.connect(binarize_combined_mask, "out_file", match_fov_ccs_brain_mask, "in_file") From 651cfa57a14eafa6a3cf43dfa7602b81c07d79e6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 12 Sep 2025 18:30:11 +0000 Subject: [PATCH 491/507] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../Dockerfiles/C-PAC.develop-jammy.Dockerfile | 2 +- .../C-PAC.develop-lite-jammy.Dockerfile | 2 +- .github/Dockerfiles/base-standard.Dockerfile | 2 +- CPAC/anat_preproc/ants.py | 18 ++++++++++++++---- .../configs/data_config_S3-BIDS-ABIDE.yml | 2 +- .../configs/data_config_S3-BIDS-ADHD200.yml | 2 +- .../data_config_S3-BIDS-ADHD200_only2.yml | 2 +- .../data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +- .../configs/data_config_cpac_benchmark.yml | 2 +- 
.../configs/data_settings_template.yml | 2 +- .../configs/group_config_template.yml | 2 +- .../configs/pipeline_config_abcd-options.yml | 2 +- .../configs/pipeline_config_abcd-prep.yml | 2 +- .../configs/pipeline_config_anat-only.yml | 2 +- .../configs/pipeline_config_benchmark-ANTS.yml | 2 +- .../pipeline_config_benchmark-FNIRT.yml | 2 +- .../configs/pipeline_config_blank.yml | 2 +- .../configs/pipeline_config_ccs-options.yml | 2 +- .../pipeline_config_default-deprecated.yml | 2 +- .../configs/pipeline_config_default.yml | 2 +- .../pipeline_config_fmriprep-ingress.yml | 2 +- .../pipeline_config_fmriprep-options.yml | 2 +- .../configs/pipeline_config_fx-options.yml | 2 +- .../configs/pipeline_config_monkey-ABCD.yml | 2 +- .../configs/pipeline_config_monkey.yml | 2 +- .../resources/configs/pipeline_config_ndmg.yml | 2 +- .../configs/pipeline_config_nhp-macaque.yml | 2 +- .../configs/pipeline_config_preproc.yml | 2 +- .../configs/pipeline_config_rbc-options.yml | 2 +- .../configs/pipeline_config_regtest-1.yml | 2 +- .../configs/pipeline_config_regtest-2.yml | 2 +- .../configs/pipeline_config_regtest-3.yml | 2 +- .../configs/pipeline_config_regtest-4.yml | 2 +- .../configs/pipeline_config_rodent.yml | 2 +- CPAC/resources/configs/system_config.yml | 2 +- .../test_configs/data-test_S3-ADHD200_1.yml | 2 +- .../data-test_S3-ADHD200_no-params.yml | 2 +- .../test_configs/data-test_S3-NKI-RS_fmap.yml | 2 +- .../data_config_S3_CoRR_5only_mult-scan.yml | 2 +- .../data_config_S3_CoRR_5only_mult-sess.yml | 2 +- .../configs/test_configs/pipe-test_ABCD.yml | 2 +- .../pipe-test_ANTs-3dSk-AllNuis.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorrBET.yml | 2 +- .../pipe-test_ANTs-BET-AllNuis.yml | 2 +- .../pipe-test_FNIRT-3dSk-AllNuis.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_all.yml | 2 +- Dockerfile | 2 +- variant-lite.Dockerfile | 2 +- version | 2 +- 55 files changed, 68 insertions(+), 58 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index e41bd6fc73..1a8b5dc6a1 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index 6f350c4f18..ebbb142297 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 0eb4738375..a982c400ad 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 03d2432adc..9c145eae12 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -295,12 +295,22 @@ def init_brain_extraction_wf( # noqa: PLR0913 init_aff.inputs.search_grid = (40, (0, 40, 40)) # Set up spatial normalization - settings_file = (f'antsBrainExtraction_{normalization_quality}.json' if use_laplacian else f'antsBrainExtractionNoLaplacian_{normalization_quality}.json') - norm = pe.Node(Registration(from_file=str(files('CPAC.anat_preproc').joinpath('data').joinpath(settings_file))), - name='norm', + settings_file = ( + f"antsBrainExtraction_{normalization_quality}.json" + if use_laplacian + else f"antsBrainExtractionNoLaplacian_{normalization_quality}.json" + ) + norm = pe.Node( + Registration( + from_file=str( + files("CPAC.anat_preproc").joinpath("data").joinpath(settings_file) + ) + ), + name="norm", n_procs=omp_nthreads, mem_gb=1.7, - mem_x=(1233286593342025 / 151115727451828646838272, 'moving_image')) + mem_x=(1233286593342025 / 151115727451828646838272, "moving_image"), + ) norm.inputs.float = use_float fixed_mask_trait = "fixed_image_mask" if _ants_version and parseversion(_ants_version) >= Version("2.2.0"): diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml index 694fe23286..287d145476 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml index 550847bd90..a964812b4f 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml index 6b1a07f928..839c3aab13 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml index 4fe2dd4f12..afe2e2d51d 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index 344e6f2926..ceb546be14 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index 8f3193809a..06ee292815 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index f96528dce3..3d98fb285d 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 2f63c9e49c..53763e61b2 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 27a4cd5f63..bea192a97d 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index 38953f68ef..b9cccd5948 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index b42c30f547..b2ab123fb6 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index 7026e1f2fd..c0ac6e51e5 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 75c889d9b2..d2c8d21f89 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 1a4d59c7eb..1c3fea07ca 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 22c0e5dada..658b311094 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 5c42e5ad35..e856e44dd6 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 275a7d8d1f..9166c3acaa 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 5453144af5..0718297532 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index f8cc2f8de6..02d6f31ced 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index dfca2e5e46..34e1a2ade7 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 98c6ffe5f1..1d5f499c5e 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index bd77602300..7fb48bcef9 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index 2dbe4c724c..68afff32df 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index f2d567d661..028f7de296 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index 27fae8a7ac..a7c4dfd6ea 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index ee1ff5d5cf..ee96ad34d5 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index e5b6dbd626..f1fcc596aa 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index 804e4f5f2f..f0c4989c42 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 5232abdbf5..7591d09b7d 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index a5f2c6caf9..e7372d31ce 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index c33ad0d0a1..06cdcefca4 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index 544ffad843..b62cea6fa8 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 474aa7ee70..00b6d26db6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index b5355e01fa..b61fa626a6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index df33b1cbf8..73dff0bf8b 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index a1de8541b1..2c6e03d4d9 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 4f87d4b3ba..57f16c3ab5 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index e13f77e435..d2b076b434 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 41f7262a15..9e8482ae69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 4d59bdb05d..60d47af717 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 4155ffc2a1..1ef383c560 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index c8464849fd..32755f5668 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index f973dc89c4..e8abebac37 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/Dockerfile b/Dockerfile index e41bd6fc73..1a8b5dc6a1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index 6f350c4f18..ebbb142297 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root diff --git a/version b/version index bb676e6961..110ed9b99b 100644 --- a/version +++ b/version @@ -1 +1 @@ -v1.8.8.dev1 +v From 6b0356ffd6824e570006d65a8e5ec09b8c857a05 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 12 Sep 2025 15:13:17 -0400 Subject: [PATCH 492/507] fixing git path --- CPAC/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/info.py b/CPAC/info.py index 9f19ee2f00..1fcf087df0 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -56,7 +56,7 @@ def get_cpac_gitversion() -> str | None: import subprocess with as_file(files("CPAC")) as _cpac: - gitpath = _cpac.parent + gitpath = _cpac gitpathgit = gitpath / ".git" if not gitpathgit.exists(): From f4f886aedcfb6b078cca3acc8c5e717e15011102 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 12 Sep 2025 15:15:59 -0400 Subject: [PATCH 493/507] using pathlib instead in gitversion --- CPAC/info.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/CPAC/info.py b/CPAC/info.py index 1fcf087df0..e984654ae7 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -52,11 +52,10 @@ def get_cpac_gitversion() -> str | None: """CPAC version as reported by the last commit in git.""" - from importlib.resources import as_file, files + from pathlib import Path import subprocess - with as_file(files("CPAC")) as _cpac: - gitpath = _cpac + gitpath = Path(__file__).parent.resolve() gitpathgit = gitpath / ".git" if not gitpathgit.exists(): From 3ec98293dd5e22ae826b0a66cd836e6c83b7f4b0 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 12 Sep 2025 15:34:02 -0400 Subject: [PATCH 494/507] git path fix --- CPAC/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/info.py b/CPAC/info.py index e984654ae7..d530b415ed 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -55,7 +55,7 @@ def get_cpac_gitversion() -> str | None: from pathlib import Path import subprocess - gitpath = Path(__file__).parent.resolve() + gitpath = Path(__file__).resolve() gitpathgit = gitpath / ".git" if not gitpathgit.exists(): From c60f7b730d5bf087c094e529155ac1512a6ab3d8 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 12 Sep 2025 15:40:18 -0400 Subject: [PATCH 495/507] git path fix --- .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/base-standard.Dockerfile | 4 ++-- CPAC/info.py | 2 +- CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml | 2 +- CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml | 2 +- CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml | 2 +- 
.../configs/data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +- CPAC/resources/configs/data_config_cpac_benchmark.yml | 2 +- CPAC/resources/configs/data_settings_template.yml | 2 +- CPAC/resources/configs/group_config_template.yml | 2 +- CPAC/resources/configs/pipeline_config_abcd-options.yml | 2 +- CPAC/resources/configs/pipeline_config_abcd-prep.yml | 2 +- CPAC/resources/configs/pipeline_config_anat-only.yml | 2 +- CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml | 2 +- CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml | 2 +- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- CPAC/resources/configs/pipeline_config_ccs-options.yml | 2 +- CPAC/resources/configs/pipeline_config_default-deprecated.yml | 2 +- CPAC/resources/configs/pipeline_config_default.yml | 2 +- CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml | 2 +- CPAC/resources/configs/pipeline_config_fmriprep-options.yml | 2 +- CPAC/resources/configs/pipeline_config_fx-options.yml | 2 +- CPAC/resources/configs/pipeline_config_monkey-ABCD.yml | 2 +- CPAC/resources/configs/pipeline_config_monkey.yml | 2 +- CPAC/resources/configs/pipeline_config_ndmg.yml | 2 +- CPAC/resources/configs/pipeline_config_nhp-macaque.yml | 2 +- CPAC/resources/configs/pipeline_config_preproc.yml | 2 +- CPAC/resources/configs/pipeline_config_rbc-options.yml | 2 +- CPAC/resources/configs/pipeline_config_regtest-1.yml | 2 +- CPAC/resources/configs/pipeline_config_regtest-2.yml | 2 +- CPAC/resources/configs/pipeline_config_regtest-3.yml | 2 +- CPAC/resources/configs/pipeline_config_regtest-4.yml | 2 +- CPAC/resources/configs/pipeline_config_rodent.yml | 2 +- CPAC/resources/configs/system_config.yml | 2 +- .../resources/configs/test_configs/data-test_S3-ADHD200_1.yml | 2 +- .../configs/test_configs/data-test_S3-ADHD200_no-params.yml | 2 +- .../configs/test_configs/data-test_S3-NKI-RS_fmap.yml | 2 +- .../test_configs/data_config_S3_CoRR_5only_mult-scan.yml | 2 +- .../test_configs/data_config_S3_CoRR_5only_mult-sess.yml | 2 +- CPAC/resources/configs/test_configs/pipe-test_ABCD.yml | 2 +- .../configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +- .../configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml | 2 +- .../configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +- .../test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 2 +- .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml | 2 +- .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +- .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml | 2 +- CPAC/resources/configs/test_configs/pipe-test_all.yml | 2 +- Dockerfile | 4 ++-- variant-lite.Dockerfile | 4 ++-- version | 2 +- 55 files changed, 60 insertions(+), 60 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 1a8b5dc6a1..0a9a54592e 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index ebbb142297..dd07ffb736 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index a982c400ad..1547a1f343 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC @@ -56,7 +56,7 @@ RUN apt-get autoremove -y \ && rm -rf results.txt \ && ldconfig \ && chmod 777 / /home/c-pac_user \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) # set user USER c-pac_user diff --git a/CPAC/info.py b/CPAC/info.py index d530b415ed..e984654ae7 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -55,7 +55,7 @@ def get_cpac_gitversion() -> str | None: from pathlib import Path import subprocess - gitpath = Path(__file__).resolve() + gitpath = Path(__file__).parent.resolve() gitpathgit = gitpath / ".git" if not gitpathgit.exists(): diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml index 287d145476..694fe23286 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml index a964812b4f..550847bd90 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml index 839c3aab13..6b1a07f928 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml index afe2e2d51d..4fe2dd4f12 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index ceb546be14..344e6f2926 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index 06ee292815..8f3193809a 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index 3d98fb285d..f96528dce3 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 53763e61b2..2f63c9e49c 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index bea192a97d..27a4cd5f63 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index b9cccd5948..38953f68ef 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index b2ab123fb6..b42c30f547 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index c0ac6e51e5..7026e1f2fd 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index d2c8d21f89..75c889d9b2 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 1c3fea07ca..1a4d59c7eb 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 658b311094..22c0e5dada 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index e856e44dd6..5c42e5ad35 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 9166c3acaa..275a7d8d1f 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 0718297532..5453144af5 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index 02d6f31ced..f8cc2f8de6 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index 34e1a2ade7..dfca2e5e46 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 1d5f499c5e..98c6ffe5f1 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index 7fb48bcef9..bd77602300 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index 68afff32df..2dbe4c724c 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index 028f7de296..f2d567d661 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index a7c4dfd6ea..27fae8a7ac 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index ee96ad34d5..ee1ff5d5cf 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index f1fcc596aa..e5b6dbd626 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index f0c4989c42..804e4f5f2f 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 7591d09b7d..5232abdbf5 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index e7372d31ce..a5f2c6caf9 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index 06cdcefca4..c33ad0d0a1 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index b62cea6fa8..544ffad843 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 00b6d26db6..474aa7ee70 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index b61fa626a6..b5355e01fa 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index 73dff0bf8b..df33b1cbf8 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index 2c6e03d4d9..a1de8541b1 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 57f16c3ab5..4f87d4b3ba 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index d2b076b434..e13f77e435 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 9e8482ae69..41f7262a15 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 60d47af717..4d59bdb05d 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 1ef383c560..4155ffc2a1 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index 32755f5668..c8464849fd 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index f4c9b15081..a1b1664eb9 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index f4c9b15081..a1b1664eb9 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index f4c9b15081..a1b1664eb9 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index f4c9b15081..a1b1664eb9 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index f4c9b15081..a1b1664eb9 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index e8abebac37..f973dc89c4 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/Dockerfile b/Dockerfile index 1a8b5dc6a1..0a9a54592e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index ebbb142297..dd07ffb736 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/version b/version index 110ed9b99b..bb676e6961 100644 --- a/version +++ b/version @@ -1 +1 @@ -v +v1.8.8.dev1 From 613f1e819e6e167be503b52c4e1841218ccae0fc Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Fri, 12 Sep 2025 15:49:17 -0400 Subject: [PATCH 496/507] adding fall back for version --- CPAC/info.py | 1 + 1 file changed, 1 insertion(+) diff --git a/CPAC/info.py b/CPAC/info.py index e984654ae7..272545f0cf 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -68,6 +68,7 @@ def get_cpac_gitversion() -> str | None: "git describe --always", shell=True, cwd=gitpath, stdout=subprocess.PIPE ).communicate() except Exception: + ver = "0.0.0.dev" pass else: ver = o.decode().strip().split("-")[-1] From 9e433d4d23a3231a08ef423b544cd3d53b6f5b85 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 15 Sep 2025 11:31:34 -0400 Subject: [PATCH 497/507] :rewind: Revert "[pre-commit.ci] auto fixes from pre-commit.com hooks" (651cfa5) --- .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/base-standard.Dockerfile | 4 ++-- CPAC/anat_preproc/ants.py | 2 +- Dockerfile | 4 ++-- variant-lite.Dockerfile | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 0a9a54592e..e41bd6fc73 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ 
b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) + && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index dd07ffb736..6f350c4f18 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) + && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 1547a1f343..0eb4738375 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC @@ -56,7 +56,7 @@ RUN apt-get autoremove -y \ && rm -rf results.txt \ && ldconfig \ && chmod 777 / /home/c-pac_user \ - && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) + && chmod 777 $(ls / | grep -v sys | grep -v proc) # set user USER c-pac_user diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 9c145eae12..bb53f099bf 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -43,7 +43,7 @@ """ from collections import OrderedDict -from importlib.resources import as_file, files +from importlib.resources import files from logging import getLogger from typing import Literal diff --git a/Dockerfile b/Dockerfile index 0a9a54592e..e41bd6fc73 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) + && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile index dd07ffb736..6f350c4f18 100644 --- a/variant-lite.Dockerfile +++ b/variant-lite.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) + && chmod 777 $(ls / | grep -v sys | grep -v proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ From e1ab42cc6bc02f6411f143c5a32317e9f6798983 Mon Sep 17 00:00:00 2001 From: Greg Kiar Date: Thu, 11 Sep 2025 16:01:32 -0400 Subject: [PATCH 498/507] tweak SUPPORT.md language Removed the "paying" language and timebound commitment, as they seemed unnecessary to put in a public document... --- SUPPORT.md | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/SUPPORT.md b/SUPPORT.md index dec7292214..5809e042b8 100644 --- a/SUPPORT.md +++ b/SUPPORT.md @@ -3,16 +3,10 @@ Support Policy As of v1.8.8, C-PAC is in maintenance mode. With the 2.0.0 release, we will begin strict adherence to Semantic Versioning. -While in maintenance mode, we will continue to publish new releases but FCP-INDI will no longer be paying developers to add new features. - -Our team is happy to offer guidance and support where possible. For requests involving custom development, extended technical support, or integration efforts, we're open to exploring paid support arrangements. Please reach out if you'd like to discuss these options further. - Community contributions will be reviewed and released when passing review. Responsibility for these reviews is defined in [.github/CODEOWNERS](./.github/CODEOWNERS). -User support will continue at [Neurostars](https://neurostars.org/tag/cpac), though expect a slower response time as FCP-INDI will no longer be paying developers to perform user support. +User support will continue at [Neurostars](https://neurostars.org/tag/cpac), though expect a slower response time. Major bug fixes will continue to be addressed by [**@FCP-INDI/maintenance**](https://github.com/orgs/FCP-INDI/teams/maintenance). Minor bugs will be documented and left to the community to contribute fixes and workarounds. Security releases will continue to be published by [**@FCP-INDI/DevOps**](https://github.com/orgs/FCP-INDI/teams/DevOps). - -These guidelines will be in effect for 2 years, at which point they will be reevaluated and updated by [**@FCP-INDI/maintenance**](https://github.com/orgs/FCP-INDI/teams/maintenance). From 4955772e308dad2bfd8580df85d05961b66ba090 Mon Sep 17 00:00:00 2001 From: Greg Kiar Date: Thu, 18 Sep 2025 10:17:49 -0400 Subject: [PATCH 499/507] Update SUPPORT.md Co-authored-by: Jon Cluce --- SUPPORT.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SUPPORT.md b/SUPPORT.md index 5809e042b8..16eb8b642f 100644 --- a/SUPPORT.md +++ b/SUPPORT.md @@ -3,6 +3,7 @@ Support Policy As of v1.8.8, C-PAC is in maintenance mode. With the 2.0.0 release, we will begin strict adherence to Semantic Versioning. 
+While in maintenance mode, we will continue to publish new releases but FCP-INDI will no longer be developing new features. Community contributions will be reviewed and released when passing review. Responsibility for these reviews is defined in [.github/CODEOWNERS](./.github/CODEOWNERS). User support will continue at [Neurostars](https://neurostars.org/tag/cpac), though expect a slower response time. From 3e3cec5b183f0125381850f85b342ad20eda8450 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Mon, 22 Sep 2025 15:32:30 -0400 Subject: [PATCH 500/507] changes suggested by Jon --- CPAC/info.py | 16 ++++++++++------ CPAC/pipeline/schema.py | 6 +++--- .../configs/data_config_S3-BIDS-ABIDE.yml | 2 +- .../configs/data_config_S3-BIDS-ADHD200.yml | 2 +- .../data_config_S3-BIDS-ADHD200_only2.yml | 2 +- .../data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +- .../configs/data_config_cpac_benchmark.yml | 2 +- .../resources/configs/data_settings_template.yml | 2 +- CPAC/resources/configs/group_config_template.yml | 2 +- .../configs/pipeline_config_abcd-options.yml | 2 +- .../configs/pipeline_config_abcd-prep.yml | 2 +- .../configs/pipeline_config_anat-only.yml | 2 +- .../configs/pipeline_config_benchmark-ANTS.yml | 2 +- .../configs/pipeline_config_benchmark-FNIRT.yml | 2 +- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- .../configs/pipeline_config_ccs-options.yml | 2 +- .../pipeline_config_default-deprecated.yml | 2 +- .../configs/pipeline_config_default.yml | 2 +- .../configs/pipeline_config_fmriprep-ingress.yml | 2 +- .../configs/pipeline_config_fmriprep-options.yml | 2 +- .../configs/pipeline_config_fx-options.yml | 2 +- .../configs/pipeline_config_monkey-ABCD.yml | 2 +- .../resources/configs/pipeline_config_monkey.yml | 2 +- CPAC/resources/configs/pipeline_config_ndmg.yml | 2 +- .../configs/pipeline_config_nhp-macaque.yml | 2 +- .../configs/pipeline_config_preproc.yml | 2 +- .../configs/pipeline_config_rbc-options.yml | 2 +- .../configs/pipeline_config_regtest-1.yml | 2 +- .../configs/pipeline_config_regtest-2.yml | 2 +- .../configs/pipeline_config_regtest-3.yml | 2 +- .../configs/pipeline_config_regtest-4.yml | 2 +- .../resources/configs/pipeline_config_rodent.yml | 2 +- CPAC/resources/configs/system_config.yml | 2 +- .../test_configs/data-test_S3-ADHD200_1.yml | 2 +- .../data-test_S3-ADHD200_no-params.yml | 2 +- .../test_configs/data-test_S3-NKI-RS_fmap.yml | 2 +- .../data_config_S3_CoRR_5only_mult-scan.yml | 2 +- .../data_config_S3_CoRR_5only_mult-sess.yml | 2 +- .../configs/test_configs/pipe-test_ABCD.yml | 2 +- .../test_configs/pipe-test_ANTs-3dSk-AllNuis.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorrBET.yml | 2 +- .../test_configs/pipe-test_ANTs-BET-AllNuis.yml | 2 +- .../pipe-test_FNIRT-3dSk-AllNuis.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +- .../test_configs/pipe-test_FNIRT-BET-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_all.yml | 2 +- version | 2 +- 51 files changed, 62 insertions(+), 58 deletions(-) diff --git a/CPAC/info.py b/CPAC/info.py index 272545f0cf..de41799605 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -52,13 +52,18 @@ def get_cpac_gitversion() -> str | None: """CPAC version as reported by the last commit in git.""" - from pathlib import Path + from importlib.resources import as_file, files import subprocess - gitpath = 
Path(__file__).parent.resolve() - - gitpathgit = gitpath / ".git" - if not gitpathgit.exists(): + with as_file(files("CPAC")) as _cpac: + gitpath = _cpac + gitpathgit = None + for _cpacpath in [gitpath, *gitpath.parents]: + git_dir = _cpacpath / ".git" + if git_dir.exists(): + gitpathgit = git_dir + break + if not gitpathgit: return None ver = None @@ -68,7 +73,6 @@ def get_cpac_gitversion() -> str | None: "git describe --always", shell=True, cwd=gitpath, stdout=subprocess.PIPE ).communicate() except Exception: - ver = "0.0.0.dev" pass else: ver = o.decode().strip().split("-")[-1] diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 07f41c2f48..81c542e8e5 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -55,7 +55,7 @@ Schema, Title, ) -from voluptuous.schema_builder import Schema, UNDEFINED +from voluptuous.schema_builder import Schemable, UNDEFINED from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX @@ -80,7 +80,7 @@ ORGANISMS: list[Organism] = ["human", "non-human primate", "rodent"] -def deprecated_option(option: Schema, version: str, message: str) -> None: +def deprecated_option(option: Schemable, version: str, message: str) -> None: """Mark an option as deprecated. Parameters @@ -119,7 +119,7 @@ class Deprecated(Optional): def __init__( self, - schema: Schema, + schema: Schemable, version: str, msg: str = "This option is deprecated and will be removed in a future release.", default: AnyType = UNDEFINED, diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml index 694fe23286..287d145476 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml index 550847bd90..a964812b4f 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml index 6b1a07f928..839c3aab13 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml index 4fe2dd4f12..afe2e2d51d 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
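The CPAC/info.py hunk above swaps the fixed `Path(__file__).parent` lookup for a walk up the package's parent directories, so a `.git` directory is found even when the repository root sits above the installed `CPAC` package. A minimal standalone sketch of that lookup, assuming a normal on-disk source checkout and only the standard library (`find_git_describe` is an illustrative name, not part of C-PAC):

    import subprocess
    from importlib.resources import as_file, files
    from pathlib import Path


    def find_git_describe(package: str = "CPAC") -> str | None:
        """Return `git describe --always` for the repository containing `package`.

        Walk from the package directory up through its parents, stop at the
        first directory that contains a `.git` entry, and run `git describe`
        there; return None if no enclosing repository is found or git fails.
        """
        with as_file(files(package)) as pkg_dir:
            start = Path(pkg_dir)
            for candidate in (start, *start.parents):
                if (candidate / ".git").exists():
                    try:
                        out = subprocess.run(
                            ["git", "describe", "--always"],
                            cwd=candidate,
                            capture_output=True,
                            text=True,
                            check=True,
                        )
                    except (OSError, subprocess.CalledProcessError):
                        return None
                    return out.stdout.strip()
        return None

The patch itself then keeps only the last `-`-separated field of that output (`o.decode().strip().split("-")[-1]`); patch 501 below reverts this walk in favor of the original `Path(__file__).parent.resolve()` check.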
# diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index 344e6f2926..ceb546be14 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index 8f3193809a..06ee292815 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index f96528dce3..3d98fb285d 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 2f63c9e49c..53763e61b2 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 27a4cd5f63..bea192a97d 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index 38953f68ef..b9cccd5948 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index b42c30f547..b2ab123fb6 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index 7026e1f2fd..c0ac6e51e5 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 75c889d9b2..d2c8d21f89 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 1a4d59c7eb..1c3fea07ca 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 22c0e5dada..658b311094 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 5c42e5ad35..e856e44dd6 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 275a7d8d1f..9166c3acaa 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 5453144af5..0718297532 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index f8cc2f8de6..02d6f31ced 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index dfca2e5e46..34e1a2ade7 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 98c6ffe5f1..1d5f499c5e 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index bd77602300..7fb48bcef9 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index 2dbe4c724c..68afff32df 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index f2d567d661..028f7de296 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index 27fae8a7ac..a7c4dfd6ea 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index ee1ff5d5cf..ee96ad34d5 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index e5b6dbd626..f1fcc596aa 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index 804e4f5f2f..f0c4989c42 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 5232abdbf5..7591d09b7d 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index a5f2c6caf9..e7372d31ce 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index c33ad0d0a1..06cdcefca4 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index 544ffad843..b62cea6fa8 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 474aa7ee70..00b6d26db6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index b5355e01fa..b61fa626a6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index df33b1cbf8..73dff0bf8b 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index a1de8541b1..2c6e03d4d9 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 4f87d4b3ba..57f16c3ab5 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index e13f77e435..d2b076b434 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 41f7262a15..9e8482ae69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 4d59bdb05d..60d47af717 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 4155ffc2a1..1ef383c560 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index c8464849fd..32755f5668 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index a1b1664eb9..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index f973dc89c4..e8abebac37 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.8.dev1 +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/version b/version index bb676e6961..110ed9b99b 100644 --- a/version +++ b/version @@ -1 +1 @@ -v1.8.8.dev1 +v From 866524498880fc765aad0ccb82bc04288f4e82d3 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 23 Sep 2025 09:40:56 -0400 Subject: [PATCH 501/507] reverting multiple version added by ci again --- .../Dockerfiles/C-PAC.develop-jammy.Dockerfile | 4 ++-- .../C-PAC.develop-lite-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/base-standard.Dockerfile | 4 ++-- CPAC/info.py | 16 +++++----------- .../configs/data_config_S3-BIDS-ABIDE.yml | 2 +- .../configs/data_config_S3-BIDS-ADHD200.yml | 2 +- .../data_config_S3-BIDS-ADHD200_only2.yml | 2 +- .../data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +- .../configs/data_config_cpac_benchmark.yml | 2 +- .../resources/configs/data_settings_template.yml | 2 +- CPAC/resources/configs/group_config_template.yml | 2 +- .../configs/pipeline_config_abcd-options.yml | 2 +- .../configs/pipeline_config_abcd-prep.yml | 2 +- .../configs/pipeline_config_anat-only.yml | 2 +- .../configs/pipeline_config_benchmark-ANTS.yml | 2 +- .../configs/pipeline_config_benchmark-FNIRT.yml | 2 +- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- .../configs/pipeline_config_ccs-options.yml | 2 +- .../pipeline_config_default-deprecated.yml | 2 +- .../configs/pipeline_config_default.yml | 2 +- .../configs/pipeline_config_fmriprep-ingress.yml | 2 +- .../configs/pipeline_config_fmriprep-options.yml | 2 +- .../configs/pipeline_config_fx-options.yml | 2 +- .../configs/pipeline_config_monkey-ABCD.yml | 2 +- .../resources/configs/pipeline_config_monkey.yml | 2 +- CPAC/resources/configs/pipeline_config_ndmg.yml | 2 +- .../configs/pipeline_config_nhp-macaque.yml | 2 +- .../configs/pipeline_config_preproc.yml | 2 +- .../configs/pipeline_config_rbc-options.yml | 2 +- .../configs/pipeline_config_regtest-1.yml | 2 +- .../configs/pipeline_config_regtest-2.yml | 2 +- .../configs/pipeline_config_regtest-3.yml | 2 +- .../configs/pipeline_config_regtest-4.yml | 2 +- .../resources/configs/pipeline_config_rodent.yml | 2 +- CPAC/resources/configs/system_config.yml | 2 +- .../test_configs/data-test_S3-ADHD200_1.yml | 2 +- .../data-test_S3-ADHD200_no-params.yml | 2 +- .../test_configs/data-test_S3-NKI-RS_fmap.yml | 2 +- .../data_config_S3_CoRR_5only_mult-scan.yml | 2 +- .../data_config_S3_CoRR_5only_mult-sess.yml | 2 +- .../configs/test_configs/pipe-test_ABCD.yml | 2 +- .../test_configs/pipe-test_ANTs-3dSk-AllNuis.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorrBET.yml | 2 +- .../test_configs/pipe-test_ANTs-BET-AllNuis.yml | 2 +- .../pipe-test_FNIRT-3dSk-AllNuis.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +- .../test_configs/pipe-test_FNIRT-BET-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_all.yml | 2 +- Dockerfile | 4 ++-- variant-lite.Dockerfile | 4 ++-- version | 2 +- 55 files changed, 64 insertions(+), 70 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index e41bd6fc73..710b08cc5f 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile index 6f350c4f18..24f0943ca9 100644 --- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC USER root @@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \ && apt-get autoremove -y \ && ldconfig \ && chmod 777 / \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) ENV PYTHONUSERBASE=/home/c-pac_user/.local ENV PATH=$PATH:/home/c-pac_user/.local/bin \ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \ diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile index 0eb4738375..14352330d9 100644 --- a/.github/Dockerfiles/base-standard.Dockerfile +++ b/.github/Dockerfiles/base-standard.Dockerfile @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . 
FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer -FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1 +FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \ Standard software dependencies for C-PAC standard images" LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC @@ -56,7 +56,7 @@ RUN apt-get autoremove -y \ && rm -rf results.txt \ && ldconfig \ && chmod 777 / /home/c-pac_user \ - && chmod 777 $(ls / | grep -v sys | grep -v proc) + && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc) # set user USER c-pac_user diff --git a/CPAC/info.py b/CPAC/info.py index de41799605..a5c63923cd 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -52,22 +52,16 @@ def get_cpac_gitversion() -> str | None: """CPAC version as reported by the last commit in git.""" - from importlib.resources import as_file, files + from pathlib import Path import subprocess - with as_file(files("CPAC")) as _cpac: - gitpath = _cpac - gitpathgit = None - for _cpacpath in [gitpath, *gitpath.parents]: - git_dir = _cpacpath / ".git" - if git_dir.exists(): - gitpathgit = git_dir - break - if not gitpathgit: + gitpath = Path(__file__).parent.resolve() + + gitpathgit = gitpath / ".git" + if not gitpathgit.exists(): return None ver = None - try: o, _ = subprocess.Popen( "git describe --always", shell=True, cwd=gitpath, stdout=subprocess.PIPE diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml index 287d145476..59bdf37710 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml index a964812b4f..fc123fa6dd 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml index 839c3aab13..894f86fd5a 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml index afe2e2d51d..d166dc3f52 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. 
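The `standard-v1.8.8.dev11.8.8.dev1…` tags being reverted in this patch are the footprint of a version-stamping step that appends the version after each `-v` marker without checking whether one is already present, so every CI run grows the string; the same unanchored substitution also clobbered the unrelated `grep -v` flags in the Dockerfiles. A small sketch of an idempotent, anchored alternative, assuming a regex-based stamper (`stamp_version` and these two marker patterns are illustrative, not C-PAC's actual release tooling):

    import re

    # Each pattern also consumes any previously stamped version, so
    # re-running the stamp replaces it instead of appending another copy.
    VERSION_MARKERS = [
        (re.compile(r"^(# Version).*$", re.MULTILINE), r"\g<1> {version}"),
        (re.compile(r"(stage-base:(?:standard|lite)-v)[\w.]*"), r"\g<1>{version}"),
    ]


    def stamp_version(text: str, version: str) -> str:
        """Rewrite version markers in `text` to exactly `version` (idempotent)."""
        for pattern, template in VERSION_MARKERS:
            text = pattern.sub(template.format(version=version), text)
        return text

Because the second pattern is anchored to the `stage-base:…-v` image tag, a line like `grep -v sys` is left untouched, and stamping an already-stamped file is a no-op rather than another concatenation.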
# diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index ceb546be14..f31fd12372 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index 06ee292815..a7e2434fd4 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index 3d98fb285d..0cb949052a 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 53763e61b2..c114547e2d 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index bea192a97d..ae295dd116 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index b9cccd5948..30880f77d7 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index b2ab123fb6..6795842bec 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index c0ac6e51e5..e3fddf4847 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index d2c8d21f89..5fdb401f57 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 1c3fea07ca..78cd1974ca 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 658b311094..85e69367de 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index e856e44dd6..43bfcb441c 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 9166c3acaa..acb0789a08 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 0718297532..110a84465c 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index 02d6f31ced..88c4bc6cc5 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index 34e1a2ade7..d3e6d070cc 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 1d5f499c5e..e926172711 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index 7fb48bcef9..5517363da2 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index 68afff32df..606fce75e7 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index 028f7de296..7579342c8a 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index a7c4dfd6ea..057691ab89 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index ee96ad34d5..591b1e7960 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index f1fcc596aa..87be241869 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index f0c4989c42..aed4562f76 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 7591d09b7d..b705ce1915 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index e7372d31ce..e63d7d26ec 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index 06cdcefca4..403e81ddcc 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index b62cea6fa8..3b94995615 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 00b6d26db6..304ed59128 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index b61fa626a6..49bc433acb 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index 73dff0bf8b..1adfaa56a2 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index 2c6e03d4d9..40cd0822f8 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 57f16c3ab5..554d6a1455 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index d2b076b434..c9dfbd2248 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 9e8482ae69..da684e367f 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 60d47af717..4dc38a7443 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 1ef383c560..6d42588ee0 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index 32755f5668..99dd9001c5 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index f4c9b15081..b6a0a7d513 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index f4c9b15081..b6a0a7d513 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index f4c9b15081..b6a0a7d513 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index f4c9b15081..b6a0a7d513 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index f4c9b15081..b6a0a7d513 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index e8abebac37..90008c0e6b 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/Dockerfile b/Dockerfile index e41bd6fc73..710b08cc5f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v sys | grep -v proc)
+    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile
index 6f350c4f18..24f0943ca9 100644
--- a/variant-lite.Dockerfile
+++ b/variant-lite.Dockerfile
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .

-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v sys | grep -v proc)
+    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/version b/version
index 110ed9b99b..bb676e6961 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-v
+v1.8.8.dev1

From 7bcf7eeb0d40d9e23725811371732069fbc39044 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 23 Sep 2025 13:06:51 -0400
Subject: [PATCH 502/507] :rewind: Revert "reverting multiple version added by ci again"

This reverts commit 866524498880fc765aad0ccb82bc04288f4e82d3.
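The stacked stage-base tags above are what repeated, unguarded substitution produces: once the stored version degrades to a bare "v" (as the version-file hunk suggests), the pattern "-v" matches the tail of an already-updated tag, and even "grep -v", appending another copy on every run. A minimal reproduction of that failure mode, with the degraded "v" assumed:

    old="v" new="v1.8.8.dev1"
    line='FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1'
    for run in 1 2 3; do
        line=$(sed "s/-${old}/-${new}/g" <<<"$line")
        echo "$line"   # grows by one '1.8.8.dev1' per run
    done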
--- .../Dockerfiles/C-PAC.develop-jammy.Dockerfile | 4 ++-- .../C-PAC.develop-lite-jammy.Dockerfile | 4 ++-- .github/Dockerfiles/base-standard.Dockerfile | 4 ++-- CPAC/info.py | 16 +++++++++++----- .../configs/data_config_S3-BIDS-ABIDE.yml | 2 +- .../configs/data_config_S3-BIDS-ADHD200.yml | 2 +- .../data_config_S3-BIDS-ADHD200_only2.yml | 2 +- .../data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +- .../configs/data_config_cpac_benchmark.yml | 2 +- .../resources/configs/data_settings_template.yml | 2 +- CPAC/resources/configs/group_config_template.yml | 2 +- .../configs/pipeline_config_abcd-options.yml | 2 +- .../configs/pipeline_config_abcd-prep.yml | 2 +- .../configs/pipeline_config_anat-only.yml | 2 +- .../configs/pipeline_config_benchmark-ANTS.yml | 2 +- .../configs/pipeline_config_benchmark-FNIRT.yml | 2 +- CPAC/resources/configs/pipeline_config_blank.yml | 2 +- .../configs/pipeline_config_ccs-options.yml | 2 +- .../pipeline_config_default-deprecated.yml | 2 +- .../configs/pipeline_config_default.yml | 2 +- .../configs/pipeline_config_fmriprep-ingress.yml | 2 +- .../configs/pipeline_config_fmriprep-options.yml | 2 +- .../configs/pipeline_config_fx-options.yml | 2 +- .../configs/pipeline_config_monkey-ABCD.yml | 2 +- .../resources/configs/pipeline_config_monkey.yml | 2 +- CPAC/resources/configs/pipeline_config_ndmg.yml | 2 +- .../configs/pipeline_config_nhp-macaque.yml | 2 +- .../configs/pipeline_config_preproc.yml | 2 +- .../configs/pipeline_config_rbc-options.yml | 2 +- .../configs/pipeline_config_regtest-1.yml | 2 +- .../configs/pipeline_config_regtest-2.yml | 2 +- .../configs/pipeline_config_regtest-3.yml | 2 +- .../configs/pipeline_config_regtest-4.yml | 2 +- .../resources/configs/pipeline_config_rodent.yml | 2 +- CPAC/resources/configs/system_config.yml | 2 +- .../test_configs/data-test_S3-ADHD200_1.yml | 2 +- .../data-test_S3-ADHD200_no-params.yml | 2 +- .../test_configs/data-test_S3-NKI-RS_fmap.yml | 2 +- .../data_config_S3_CoRR_5only_mult-scan.yml | 2 +- .../data_config_S3_CoRR_5only_mult-sess.yml | 2 +- .../configs/test_configs/pipe-test_ABCD.yml | 2 +- .../test_configs/pipe-test_ANTs-3dSk-AllNuis.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +- .../pipe-test_ANTs-3dSk-DistCorrBET.yml | 2 +- .../test_configs/pipe-test_ANTs-BET-AllNuis.yml | 2 +- .../pipe-test_FNIRT-3dSk-AllNuis.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-ISC.yml | 2 +- .../pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +- .../test_configs/pipe-test_FNIRT-BET-AllNuis.yml | 2 +- .../configs/test_configs/pipe-test_all.yml | 2 +- Dockerfile | 4 ++-- variant-lite.Dockerfile | 4 ++-- version | 2 +- 55 files changed, 70 insertions(+), 64 deletions(-) diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile index 710b08cc5f..e41bd6fc73 100644 --- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile +++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
+    && chmod 777 $(ls / | grep -v sys | grep -v proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
index 24f0943ca9..6f350c4f18 100644
--- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .

-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
+    && chmod 777 $(ls / | grep -v sys | grep -v proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile
index 14352330d9..0eb4738375 100644
--- a/.github/Dockerfiles/base-standard.Dockerfile
+++ b/.github/Dockerfiles/base-standard.Dockerfile
@@ -16,7 +16,7 @@
 # License along with C-PAC. If not, see .

 FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy AS freesurfer
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
 LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
 Standard software dependencies for C-PAC standard images"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
@@ -56,7 +56,7 @@ RUN apt-get autoremove -y \
     && rm -rf results.txt \
     && ldconfig \
     && chmod 777 / /home/c-pac_user \
-    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
+    && chmod 777 $(ls / | grep -v sys | grep -v proc)

 # set user
 USER c-pac_user
diff --git a/CPAC/info.py b/CPAC/info.py
index a5c63923cd..de41799605 100644
--- a/CPAC/info.py
+++ b/CPAC/info.py
@@ -52,16 +52,22 @@
 def get_cpac_gitversion() -> str | None:
     """CPAC version as reported by the last commit in git."""
-    from pathlib import Path
+    from importlib.resources import as_file, files
     import subprocess

-    gitpath = Path(__file__).parent.resolve()
-
-    gitpathgit = gitpath / ".git"
-    if not gitpathgit.exists():
+    with as_file(files("CPAC")) as _cpac:
+        gitpath = _cpac
+        gitpathgit = None
+        for _cpacpath in [gitpath, *gitpath.parents]:
+            git_dir = _cpacpath / ".git"
+            if git_dir.exists():
+                gitpathgit = git_dir
+                break
+    if not gitpathgit:
         return None

     ver = None
+
     try:
         o, _ = subprocess.Popen(
             "git describe --always", shell=True, cwd=gitpath, stdout=subprocess.PIPE
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
index 59bdf37710..287d145476 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
index fc123fa6dd..a964812b4f 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
index 894f86fd5a..839c3aab13 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
index d166dc3f52..afe2e2d51d 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version
 #
 # http://fcp-indi.github.io for more info.
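The get_cpac_gitversion change above stops assuming the package sits at the repository root: it walks from the installed CPAC directory up through its parents until a .git entry appears, then runs git describe from there. A rough shell equivalent of that search, for illustration only (assumes an importable CPAC and git on PATH):

    dir="$(python -c 'import os, CPAC; print(os.path.dirname(CPAC.__file__))')"
    while [[ "$dir" != "/" && ! -e "$dir/.git" ]]; do
        dir="$(dirname "$dir")"   # step up one directory per iteration
    done
    [[ -e "$dir/.git" ]] && git -C "$dir" describe --always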
# diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index f31fd12372..ceb546be14 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index a7e2434fd4..06ee292815 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index 0cb949052a..3d98fb285d 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index c114547e2d..53763e61b2 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index ae295dd116..bea192a97d 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index 30880f77d7..b9cccd5948 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index 6795842bec..b2ab123fb6 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index e3fddf4847..c0ac6e51e5 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 5fdb401f57..d2c8d21f89 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 78cd1974ca..1c3fea07ca 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 85e69367de..658b311094 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 43bfcb441c..e856e44dd6 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index acb0789a08..9166c3acaa 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 110a84465c..0718297532 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index 88c4bc6cc5..02d6f31ced 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index d3e6d070cc..34e1a2ade7 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index e926172711..1d5f499c5e 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index 5517363da2..7fb48bcef9 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index 606fce75e7..68afff32df 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index 7579342c8a..028f7de296 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index 057691ab89..a7c4dfd6ea 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index 591b1e7960..ee96ad34d5 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index 87be241869..f1fcc596aa 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index aed4562f76..f0c4989c42 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index b705ce1915..7591d09b7d 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index e63d7d26ec..e7372d31ce 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index 403e81ddcc..06cdcefca4 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index 3b94995615..b62cea6fa8 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 304ed59128..00b6d26db6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index 49bc433acb..b61fa626a6 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index 1adfaa56a2..73dff0bf8b 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index 40cd0822f8..2c6e03d4d9 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 554d6a1455..57f16c3ab5 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index c9dfbd2248..d2b076b434 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index da684e367f..9e8482ae69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 4dc38a7443..60d47af717 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index 6d42588ee0..1ef383c560 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index 99dd9001c5..32755f5668 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index b6a0a7d513..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index b6a0a7d513..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index b6a0a7d513..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index b6a0a7d513..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index b6a0a7d513..f4c9b15081 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index 90008c0e6b..e8abebac37 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version # # http://fcp-indi.github.io for more info. # diff --git a/Dockerfile b/Dockerfile index 710b08cc5f..e41bd6fc73 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -42,7 +42,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
+    && chmod 777 $(ls / | grep -v sys | grep -v proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile
index 24f0943ca9..6f350c4f18 100644
--- a/variant-lite.Dockerfile
+++ b/variant-lite.Dockerfile
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .

-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
 LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
 LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
 USER root
@@ -43,7 +43,7 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
     && apt-get autoremove -y \
     && ldconfig \
     && chmod 777 / \
-    && chmod 777 $(ls / | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 sys | grep -v1.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev11.8.8.dev1 proc)
+    && chmod 777 $(ls / | grep -v sys | grep -v proc)
 ENV PYTHONUSERBASE=/home/c-pac_user/.local
 ENV PATH=$PATH:/home/c-pac_user/.local/bin \
     PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
diff --git a/version b/version
index bb676e6961..110ed9b99b 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-v1.8.8.dev1
+v

From bf46638316e397a0eb86c7bb9ecd7fc31951257e Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 23 Sep 2025 15:20:08 -0400
Subject: [PATCH 503/507] :recycle: Refactor autoversioning script

---
 .github/scripts/autoversioning.sh | 202 ++++++++++++++----
 .pre-commit-config.yaml | 2 +-
 .../configs/data_config_S3-BIDS-ABIDE.yml | 2 +-
 .../configs/data_config_S3-BIDS-ADHD200.yml | 2 +-
 .../data_config_S3-BIDS-ADHD200_only2.yml | 2 +-
 ...data_config_S3-BIDS-NKI-RocklandSample.yml | 2 +-
 .../configs/data_config_cpac_benchmark.yml | 2 +-
 .../configs/data_settings_template.yml | 2 +-
 .../configs/group_config_template.yml | 2 +-
 .../configs/pipeline_config_abcd-options.yml | 21 +-
 .../configs/pipeline_config_abcd-prep.yml | 2 +-
 .../configs/pipeline_config_anat-only.yml | 2 +-
 .../pipeline_config_benchmark-ANTS.yml | 2 +-
 .../pipeline_config_benchmark-FNIRT.yml | 2 +-
 .../configs/pipeline_config_blank.yml | 2 +-
 .../configs/pipeline_config_ccs-options.yml | 2 +-
 .../pipeline_config_default-deprecated.yml | 2 +-
 .../configs/pipeline_config_default.yml | 2 +-
 .../pipeline_config_fmriprep-ingress.yml | 2 +-
 .../pipeline_config_fmriprep-options.yml | 2 +-
 .../configs/pipeline_config_fx-options.yml | 2 +-
 .../configs/pipeline_config_monkey-ABCD.yml | 2 +-
 .../configs/pipeline_config_monkey.yml | 2 +-
 .../configs/pipeline_config_ndmg.yml | 2 +-
 .../configs/pipeline_config_nhp-macaque.yml | 2 +-
 .../configs/pipeline_config_preproc.yml | 2 +-
 .../configs/pipeline_config_rbc-options.yml | 2 +-
 .../configs/pipeline_config_regtest-1.yml | 2 +-
 .../configs/pipeline_config_regtest-2.yml | 2 +-
 .../configs/pipeline_config_regtest-3.yml | 2 +-
 .../configs/pipeline_config_regtest-4.yml | 2 +-
 .../configs/pipeline_config_rodent.yml | 2 +-
 CPAC/resources/configs/system_config.yml | 2 +-
 version | 2 +-
 34 files changed, 197 insertions(+), 90 deletions(-)

diff --git a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh
index f93dc3f57e..96383bd878 100755
--- a/.github/scripts/autoversioning.sh
+++ b/.github/scripts/autoversioning.sh
@@ -1,6 +1,6 @@
-#!/bin/bash
+#!/usr/bin/env bash

-# Copyright (C) 2024 C-PAC Developers
+# Copyright (C) 2024-2025 C-PAC Developers

 # This file is part of C-PAC.
@@ -17,54 +17,170 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .

-# Update version comment strings
-function wait_for_git_lock() {
-    while [ -f "./.git/index.lock" ]; do
-        echo "Waiting for the git lock file to be removed..."
-        sleep 1
-    done
+
+set -euo pipefail
+trap 'echo "❌ Script failed at line $LINENO with exit code $?"' ERR
+
+# -------------------------------------------------------------------------
+# Helpers
+# -------------------------------------------------------------------------
+
+git_add_with_retry() {
+    local file=$1
+    local attempts=0
+    local max_attempts=10
+    while ! git add "$file"; do
+        attempts=$((attempts+1))
+        echo "Git add failed for $file (attempt $attempts), retrying..."
+        sleep 1
+        if [[ $attempts -ge $max_attempts ]]; then
+            echo "❌ Failed to git add $file after $max_attempts attempts"
+            exit 1
+        fi
+    done
+}
+
+update_file_if_changed() {
+    # Run a regex replacement or copy on a file and stage it if it changed
+    local expr=$1
+    local src=$2
+    local dest=${3:-$src}
+
+    local changed=0
+    if [[ -n "$expr" ]]; then
+        tmp=$(mktemp)
+        sed -E "$expr" "$src" > "$tmp"
+        if ! cmp -s "$tmp" "$dest"; then
+            mv "$tmp" "$dest"
+            git_add_with_retry "$dest"
+            changed=1
+        else
+            rm "$tmp"
+        fi
+    else
+        if [[ ! -f "$dest" ]] || ! cmp -s "$src" "$dest"; then
+            cp "$src" "$dest"
+            git_add_with_retry "$dest"
+            changed=1
+        fi
+    fi
+    return $changed
 }
-cd CPAC || exit 1
-VERSION=$(python -c "from info import __version__; print(('.'.join(('.'.join(__version__[::-1].split('-')[1].split('.')[1:])[::-1], __version__.split('-')[1])) if '-' in __version__ else __version__).split('+', 1)[0])")
-cd ..
-echo "v${VERSION}" > version
-export _SED_COMMAND="s/^(# [Vv]ersion ).*$/# Version ${VERSION}/g"
-if [[ "$OSTYPE" == "darwin"* ]]; then
-    # Mac OSX
-    find ./CPAC/resources/configs -name "*.yml" -exec sed -i '' -E "${_SED_COMMAND}" {} \;
+log_info() {
+    echo "=== $* ==="
+}
+
+# -------------------------------------------------------------------------
+# Main
+# -------------------------------------------------------------------------
+
+START_DIR=$(pwd)
+SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+REPO_ROOT="$(realpath "$SCRIPT_DIR/../..")"
+
+# -------------------------------------------------------------------------
+# Fetch version
+# -------------------------------------------------------------------------
+log_info "Fetching version"
+VERSION=$(python -c "import sys; sys.path.insert(0, '$REPO_ROOT/CPAC'); from info import __version__; print(__version__.split('+', 1)[0])")
+VERSION_FILE="$REPO_ROOT/version"
+if [[ -f "$VERSION_FILE" ]]; then
+    OLD_VERSION=$(<"$VERSION_FILE")
 else
-    # Linux and others
-    find ./CPAC/resources/configs -name "*.yml" -exec sed -i'' -r "${_SED_COMMAND}" {} \;
+    OLD_VERSION=""
+fi
+echo "v${VERSION}" > "$VERSION_FILE"
+
+# -------------------------------------------------------------------------
+# Write version file and stage it
+# -------------------------------------------------------------------------
+log_info "Updating version file"
+if update_file_if_changed "" <(echo "v${VERSION}") "$VERSION_FILE"; then
+    git_add_with_retry "$VERSION_FILE"
 fi
-wait_for_git_lock && git add version
-VERSIONS=( `git show $(git log --pretty=format:'%h' -n 1 version | tail -n 1):version` `cat version` )
-export PATTERN="(declare|typeset) -a"
-if [[ "$(declare -p VERSIONS)" =~ $PATTERN ]]
-then
-    for DOCKERFILE in $(find ./.github/Dockerfiles -name "*.Dockerfile")
-    do
-        export IFS=""
-        for LINE in $(grep "FROM ghcr\.io/fcp\-indi/c\-pac/.*\-${VERSIONS[0]}" ${DOCKERFILE})
-        do
-            echo "Updating stage tags in ${DOCKERFILE}"
+
+# -------------------------------------------------------------------------
+# Update YAML config files
+# -------------------------------------------------------------------------
+log_info "Updating YAML config files"
+VERSION_EXPR="s/^(# [Vv]ersion ).*$/# Version ${VERSION}/g"
+for YAML_FILE in "$REPO_ROOT"/CPAC/resources/configs/*.yml; do
+    echo "Processing ${YAML_FILE}"
+    echo "Applying regex: ${VERSION_EXPR}"
+
+    # Run sed safely
+    tmp=$(mktemp)
+    if ! sed -E "$VERSION_EXPR" "$YAML_FILE" > "$tmp"; then
+        echo "❌ sed failed on $YAML_FILE"
+        rm "$tmp"
+        exit 1
+    fi
+
+    if ! cmp -s "$tmp" "$YAML_FILE"; then
+        mv "$tmp" "$YAML_FILE"
+        echo "Updated $YAML_FILE"
+        git_add_with_retry "$YAML_FILE"
+    else
+        rm "$tmp"
+        echo "No changes needed for $YAML_FILE"
+    fi
+done
+
+# -------------------------------------------------------------------------
+# Update Dockerfiles (only C-PAC tags)
+# -------------------------------------------------------------------------
+log_info "Updating Dockerfiles"
+NEW_VERSION=$(<"$VERSION_FILE")
+
+if [[ "$OLD_VERSION" != "$NEW_VERSION" ]]; then
+    for DOCKERFILE in "$REPO_ROOT"/.github/Dockerfiles/*.Dockerfile; do
+        if grep -q "FROM ghcr\.io/fcp-indi/c-pac/.*-${OLD_VERSION}" "$DOCKERFILE"; then
+            echo "Updating C-PAC version in ${DOCKERFILE} from ${OLD_VERSION} to ${NEW_VERSION}"
+
             if [[ "$OSTYPE" == "darwin"* ]]; then
-                # Mac OSX
-                sed -i "" "s/\-${VERSIONS[0]}/\-${VERSIONS[1]}/g" ${DOCKERFILE}
+                # macOS sed
+                sed -i "" "s/-${OLD_VERSION}/-${NEW_VERSION}/g" "$DOCKERFILE"
             else
-                # Linux and others
-                sed -i "s/\-${VERSIONS[0]}/\-${VERSIONS[1]}/g" ${DOCKERFILE}
+                # Linux sed
+                sed -i -E "s/-${OLD_VERSION}/-${NEW_VERSION}/g" "$DOCKERFILE"
             fi
-        done
+
+            git_add_with_retry "$DOCKERFILE"
+        fi
     done
-    unset IFS
 fi
-wait_for_git_lock && git add CPAC/resources/configs .github/Dockerfiles
-
-# Overwrite top-level Dockerfiles with the CI Dockerfiles
-wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile
-wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile
-for DOCKERFILE in $(ls *Dockerfile)
-do
-    wait_for_git_lock && git add $DOCKERFILE
+
+# -------------------------------------------------------------------------
+# Overwrite top-level Dockerfiles
+# -------------------------------------------------------------------------
+log_info "Updating top-level Dockerfiles"
+TOP_DOCKERFILES=(
+    ".github/Dockerfiles/C-PAC.develop-jammy.Dockerfile:Dockerfile"
+    ".github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile:variant-lite.Dockerfile"
+)
+for SRC_DST in "${TOP_DOCKERFILES[@]}"; do
+    # Split SRC_DST by colon safely
+    SRC="${SRC_DST%%:*}"
+    DST="${SRC_DST##*:}"
+
+    FULL_SRC="$REPO_ROOT/$SRC"
+    FULL_DST="$REPO_ROOT/$DST"
+
+    if [[ ! -f "$FULL_SRC" ]]; then
+        echo "⚠️ Source Dockerfile does not exist: $FULL_SRC"
+        continue
+    fi
+    echo "Updating top-level Dockerfile: $FULL_DST from $FULL_SRC"
+    cp "$FULL_SRC" "$FULL_DST" && git_add_with_retry "$FULL_DST"
 done
+
+# Return to original directory
+cd "$START_DIR"
+
+# -------------------------------------------------------------------------
+# Summary
+# -------------------------------------------------------------------------
+echo
+echo "Version changed: (from ${OLD_VERSION} to ${NEW_VERSION})"
+echo "======================"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 66b0a5da0e..fd93639282 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -69,7 +69,7 @@ repos:
       name: Update Dockerfiles and version comments
       entry: .github/scripts/autoversioning.sh
       language: script
-      files: '.*Dockerfile$|.*\.yaml$|^CPAC/info\.py$'
+      files: '(^CPAC/info\.py$|.*Dockerfile$|.*\.ya?ml$)'
     - id: update-yaml-comments
       name: Update YAML comments
      entry: CPAC/utils/configuration/yaml_template.py
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
index 287d145476..694fe23286 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
index a964812b4f..550847bd90 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
index 839c3aab13..6b1a07f928 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
index afe2e2d51d..4fe2dd4f12 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml
index ceb546be14..344e6f2926 100644
--- a/CPAC/resources/configs/data_config_cpac_benchmark.yml
+++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml
@@ -1,5 +1,5 @@
 # CPAC Data Configuration File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml
index 06ee292815..8f3193809a 100644
--- a/CPAC/resources/configs/data_settings_template.yml
+++ b/CPAC/resources/configs/data_settings_template.yml
@@ -1,5 +1,5 @@
 # CPAC Data Settings File
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
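The update_file_if_changed helper in the refactored script above writes through a temp file and compares before replacing, so nothing is touched or staged when the substitution is a no-op; that keeps repeated pre-commit runs idempotent. The core idiom, reduced to a sketch (config.yml and the version string are placeholders):

    tmp=$(mktemp)
    sed -E "s/^(# [Vv]ersion ).*$/# Version 1.8.8.dev1/g" config.yml > "$tmp"
    if cmp -s "$tmp" config.yml; then
        rm "$tmp"              # unchanged: leave the file and the git index alone
    else
        mv "$tmp" config.yml   # changed: install the new contents, then stage
        git add config.yml
    fi

One subtlety: the helper signals "changed" with `return 1`, so a caller written as `if update_file_if_changed ...; then` takes the branch in the unchanged case.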
diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml
index 3d98fb285d..f96528dce3 100644
--- a/CPAC/resources/configs/group_config_template.yml
+++ b/CPAC/resources/configs/group_config_template.yml
@@ -1,5 +1,5 @@
 # CPAC Group-Level Analysis Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 53763e61b2..e385ce46dd 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
@@ -217,20 +217,17 @@ registration_workflows:

       # Mask the sbref created by coregistration input prep nodeblocks above before registration
       mask_sbref: Off
-      # Choose coregistration interpolation
-      interpolation: spline
-
-      # Choose coregistration degree of freedom
-      dof: 12
       boundary_based_registration:

         # this is a fork point
         # run: [On, Off] - this will run both and fork the pipeline
         run: [On]

-        # reference for boundary based registration
-        # options: 'whole-head' or 'brain'
-        reference: whole-head
+        # Choose coregistration interpolation
+        interpolation: spline
+
+        # Choose coregistration degree of freedom
+        dof: 12

     func_registration_to_template:
@@ -269,12 +266,6 @@ registration_workflows:

       # Interpolation method for writing out transformed functional images.
       # Possible values: Linear, BSpline, LanczosWindowedSinc
       interpolation: Linear
-    EPI_registration:
-
-      # directly register the mean functional to an EPI template
-      # instead of applying the anatomical T1-to-template transform to the functional data that has been
-      # coregistered to anatomical/T1 space
-      run: off

 functional_preproc:
   run: On
diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
index ae295dd116..27a4cd5f63 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml
index 30880f77d7..38953f68ef 100644
--- a/CPAC/resources/configs/pipeline_config_anat-only.yml
+++ b/CPAC/resources/configs/pipeline_config_anat-only.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
index 6795842bec..b42c30f547 100644
--- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
+++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
# diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index c0ac6e51e5..7026e1f2fd 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index d2c8d21f89..75c889d9b2 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index 1c3fea07ca..1a4d59c7eb 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml index 658b311094..22c0e5dada 100644 --- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml +++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index e856e44dd6..5c42e5ad35 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml index 9166c3acaa..275a7d8d1f 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 0718297532..5453144af5 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index 02d6f31ced..f8cc2f8de6 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
 #
diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
index 34e1a2ade7..dfca2e5e46 100644
--- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml
index 1d5f499c5e..98c6ffe5f1 100644
--- a/CPAC/resources/configs/pipeline_config_monkey.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml
index 7fb48bcef9..bd77602300 100644
--- a/CPAC/resources/configs/pipeline_config_ndmg.yml
+++ b/CPAC/resources/configs/pipeline_config_ndmg.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
index 68afff32df..2dbe4c724c 100644
--- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
+++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml
index 028f7de296..f2d567d661 100644
--- a/CPAC/resources/configs/pipeline_config_preproc.yml
+++ b/CPAC/resources/configs/pipeline_config_preproc.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml
index a7c4dfd6ea..27fae8a7ac 100644
--- a/CPAC/resources/configs/pipeline_config_rbc-options.yml
+++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml
index ee96ad34d5..ee1ff5d5cf 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-1.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml
index f1fcc596aa..e5b6dbd626 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-2.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml
index f0c4989c42..804e4f5f2f 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-3.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml
index 7591d09b7d..5232abdbf5 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-4.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml
index e7372d31ce..a5f2c6caf9 100644
--- a/CPAC/resources/configs/pipeline_config_rodent.yml
+++ b/CPAC/resources/configs/pipeline_config_rodent.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml
index 06cdcefca4..c33ad0d0a1 100644
--- a/CPAC/resources/configs/system_config.yml
+++ b/CPAC/resources/configs/system_config.yml
@@ -1,5 +1,5 @@
 # C-PAC System Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/version b/version
index 110ed9b99b..bb676e6961 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-v
+v1.8.8.dev1

From d57a78f7ecb040303f0887e0c8462dfdeab8e21a Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 23 Sep 2025 15:25:02 -0400
Subject: [PATCH 504/507] :bug: Include `test_configs`

---
 .github/scripts/autoversioning.sh                             | 4 +++-
 .../resources/configs/test_configs/data-test_S3-ADHD200_1.yml | 2 +-
 .../configs/test_configs/data-test_S3-ADHD200_no-params.yml   | 2 +-
 .../configs/test_configs/data-test_S3-NKI-RS_fmap.yml         | 2 +-
 .../test_configs/data_config_S3_CoRR_5only_mult-scan.yml      | 2 +-
 .../test_configs/data_config_S3_CoRR_5only_mult-sess.yml      | 2 +-
 CPAC/resources/configs/test_configs/pipe-test_ABCD.yml        | 2 +-
 .../configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml      | 2 +-
 .../configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml | 2 +-
 .../configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml  | 2 +-
 .../configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml       | 2 +-
 .../configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml     | 2 +-
 .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml | 2 +-
 .../test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml    | 2 +-
 .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml  | 2 +-
 .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml | 2 +-
 .../configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml      | 2 +-
 CPAC/resources/configs/test_configs/pipe-test_all.yml         | 2 +-
 18 files changed, 20 insertions(+), 18 deletions(-)

diff --git a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh
index 96383bd878..ed45763e35 100755
--- a/.github/scripts/autoversioning.sh
+++ b/.github/scripts/autoversioning.sh
@@ -105,7 +105,9 @@ fi
 # -------------------------------------------------------------------------
 log_info "Updating YAML config files"
 VERSION_EXPR="s/^(# [Vv]ersion ).*$/# Version ${VERSION}/g"
-for YAML_FILE in "$REPO_ROOT"/CPAC/resources/configs/*.yml; do
+for YAML_FILE in "$REPO_ROOT"/CPAC/resources/configs/{*.yml,*.yaml,test_configs/*.yml,test_configs/*.yaml}; do
+  [[ -e "$YAML_FILE" ]] || continue
+  echo "Processing ${YAML_FILE}"
   echo "Applying regex: ${VERSION_EXPR}"
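The existence guard added above matters because bash leaves an unmatched glob pattern in place as a literal string unless nullglob is set, and brace expansion multiplies the patterns that can go unmatched. A minimal sketch of the behavior the guard defends against (the /tmp paths here are illustrative, not part of the patch):

    #!/usr/bin/env bash
    mkdir -p /tmp/cfgdemo/test_configs
    touch /tmp/cfgdemo/a.yml /tmp/cfgdemo/test_configs/b.yml
    # Brace expansion runs before globbing, so each pattern is globbed
    # independently; *.yaml matches nothing and survives as a literal string.
    for f in /tmp/cfgdemo/{*.yml,*.yaml,test_configs/*.yml,test_configs/*.yaml}; do
      [[ -e "$f" ]] || continue   # skips the literal, unmatched patterns
      echo "would version-stamp: $f"
    done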
"$REPO_ROOT"/CPAC/resources/configs/*.yml; do +for YAML_FILE in "$REPO_ROOT"/CPAC/resources/configs/{*.yml,*.yaml,test_configs/*.yml,test_configs/*.yaml}; do + [[ -e "$YAML_FILE" ]] || continue + echo "Processing ${YAML_FILE}" echo "Applying regex: ${VERSION_EXPR}" diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index b62cea6fa8..544ffad843 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 00b6d26db6..474aa7ee70 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index b61fa626a6..b5355e01fa 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index 73dff0bf8b..df33b1cbf8 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index 2c6e03d4d9..a1de8541b1 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 57f16c3ab5..4f87d4b3ba 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index d2b076b434..e13f77e435 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version +# Version 1.8.8.dev1 # # http://fcp-indi.github.io for more info. 
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
index 9e8482ae69..41f7262a15 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
index 60d47af717..4d59bdb05d 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
index 1ef383c560..4155ffc2a1 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
index 32755f5668..c8464849fd 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
index f4c9b15081..a1b1664eb9 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
index f4c9b15081..a1b1664eb9 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
index f4c9b15081..a1b1664eb9 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
index f4c9b15081..a1b1664eb9 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
index f4c9b15081..a1b1664eb9 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #
diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml
index e8abebac37..f973dc89c4 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_all.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml
@@ -1,7 +1,7 @@
 %YAML 1.1
 ---
 # CPAC Pipeline Configuration YAML file
-# Version
+# Version 1.8.8.dev1
 #
 # http://fcp-indi.github.io for more info.
 #

From b9a741aaeca829e410e401cb26237b9131278cab Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 23 Sep 2025 15:39:19 -0400
Subject: [PATCH 505/507] :necktie: Get version from git

---
 .github/scripts/autoversioning.sh | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh
index ed45763e35..d3aa6228d6 100755
--- a/.github/scripts/autoversioning.sh
+++ b/.github/scripts/autoversioning.sh
@@ -86,7 +86,9 @@ log_info "Fetching version"
 VERSION=$(python -c "import sys; sys.path.insert(0, '$REPO_ROOT/CPAC'); from info import __version__; print(__version__.split('+', 1)[0])")
 VERSION_FILE="$REPO_ROOT/version"
 if [[ -f "$VERSION_FILE" ]]; then
-  OLD_VERSION=$(<"$VERSION_FILE")
+  cd "$REPO_ROOT"
+  OLD_VERSION=$(git show "$(git log --pretty=format:'%h' -n 1 -- version | tail -n 1)":version)
+  cd "$START_DIR"
 else
   OLD_VERSION=""
 fi
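The git invocation introduced above reads the version file as it was last committed rather than as it sits in the working tree, so a locally edited but uncommitted version file cannot skew the comparison. A step-by-step sketch of what the one-liner does, assuming an arbitrary checkout path:

    #!/usr/bin/env bash
    cd /path/to/C-PAC   # illustrative repository location
    # Short hash of the most recent commit that touched the version file.
    LAST_TOUCH=$(git log --pretty=format:'%h' -n 1 -- version)
    # "<commit>:<path>" asks git for the blob contents at that commit,
    # independent of the current working-tree state.
    OLD_VERSION=$(git show "${LAST_TOUCH}:version")
    echo "last committed version: ${OLD_VERSION}"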
From 5a64fada9b7f1a6060e1dfe2c08d22cf747783bb Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 23 Sep 2025 15:55:26 -0400
Subject: [PATCH 506/507] :recycle: Guard `license_notice`

---
 CPAC/__init__.py               | 6 +++++-
 CPAC/pipeline/cpac_pipeline.py | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/CPAC/__init__.py b/CPAC/__init__.py
index 1aaa8bb066..346fc897f6 100644
--- a/CPAC/__init__.py
+++ b/CPAC/__init__.py
@@ -35,10 +35,14 @@ def _docs_prefix() -> str:
     return DOCS_URL_PREFIX
 
 
-license_notice = f"""Copyright (C) 2022-2024 C-PAC Developers.
+def license_notice() -> str:
+    """Get the license notice for this version."""
+    return f"""Copyright (C) 2022-2024 C-PAC Developers.
 
 This program comes with ABSOLUTELY NO WARRANTY.
 This is free software, and you are welcome to redistribute it under certain conditions.
 For details, see {_docs_prefix()}/license or the COPYING and
 COPYING.LESSER files included in the source code."""
+
+
 __all__ = ["license_notice", "version", "__version__"]
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index e274b39ac8..ec5339a188 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -414,7 +414,7 @@ def run_workflow(
             )
             if c.pipeline_setup["system_config"]["random_seed"] is not None
             else "",
-            license_notice=CPAC.license_notice.replace("\n", "\n "),
+            license_notice=CPAC.license_notice().replace("\n", "\n "),
         ),
     )
     subject_info = {}

From 8dfdd9e7076164c420270bd05dad625a8ea5ffb5 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 25 Sep 2025 15:10:49 -0400
Subject: [PATCH 507/507] fixup! :recycle: Guard `license_notice`

---
 CPAC/_entrypoints/run.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py
index 1721756719..ac68c7dfb3 100755
--- a/CPAC/_entrypoints/run.py
+++ b/CPAC/_entrypoints/run.py
@@ -112,7 +112,7 @@ def resolve_aws_credential(source: Path | str) -> str:
 def run_main():
     """Run this function if not importing as a script."""
     parser = argparse.ArgumentParser(
-        description="C-PAC Pipeline Runner. " + license_notice
+        description="C-PAC Pipeline Runner. " + license_notice()
     )
     parser.add_argument(
         "bids_dir",
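Wrapping license_notice in a function defers the f-string, and the _docs_prefix() lookup inside it, from import time to call time, which appears to be the point of the guard: the docs URL prefix can change after CPAC is imported and the notice will still reflect it. A minimal sketch of the difference, with stand-in names rather than the real C-PAC module:

    def docs_prefix() -> str:
        """Stand-in for a lookup whose result may change after import."""
        return "https://example.org/docs"

    # Eager: the f-string is evaluated exactly once, at import time.
    NOTICE_EAGER = f"Docs: {docs_prefix()}"

    def notice_lazy() -> str:
        """Lazy: re-evaluated on each call, after configuration settles."""
        return f"Docs: {docs_prefix()}"

    print(NOTICE_EAGER)   # frozen at whatever docs_prefix() returned at import
    print(notice_lazy())  # reflects the current state at call time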