diff --git a/.github/workflows/auto-conv.yaml b/.github/workflows/auto-conv.yaml new file mode 100644 index 0000000..c7c2ed7 --- /dev/null +++ b/.github/workflows/auto-conv.yaml @@ -0,0 +1,66 @@ +#This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +# For deployment, it will be necessary to create a PyPI API token and store it as a secret +# https://docs.github.com/en/actions/reference/encrypted-secrets + +name: Auto-convert Nipype tasks to Pydra + +on: + workflow_dispatch: # Trigger this workflow manually or via a repository dispatch event + repository_dispatch: + types: [auto-conv] + +permissions: + contents: write + pages: write + id-token: write + +jobs: + + auto-conv: + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v4 + + - name: Checkout auto-conv branch + run: git checkout auto-conv + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install build dependencies + run: python -m pip install --upgrade pip + + - name: Install requirements + run: python -m pip install -r ./nipype-auto-conv/requirements.txt + + - name: Run automatic Nipype > Pydra conversion + run: ./nipype-auto-conv/generate + + - name: Create branch that rebases main on auto-conv + run: | + git config --local user.email "action@github.com" + git config --local user.name "Github Action" + git checkout -b main-rebase main + git rebase auto-conv + + - name: Create pull request + uses: peter-evans/create-pull-request@v4 + with: + branch: main-rebase + title: 'Rebase main on auto-conv' + body: 'This PR rebases the main branch on the auto-conv branch to include the latest Nipype to Pydra conversions.' 
+ base: main + commit-message: 'Rebase main on auto-conv' + labels: auto-conv + + - uses: actions/upload-artifact@v4 + with: + name: converted-nipype + path: pydra/tasks/ants/v2 + retention-days: 7 diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index 9e80028..eefecf4 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -7,117 +7,51 @@ name: CI/CD on: - push: - branches: [ main, develop ] - pull_request: - branches: [ main, develop ] release: types: [published] - repository_dispatch: - types: [create-post-release] + push: + branches: + - main + pull_request: + branches: + - main + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true -env: - FSL_VERSION: 6.0.7.9 - FSL_HOME: ${{ github.workspace }}/fsl-install permissions: contents: read pages: write id-token: write +env: # Define environment variables + # Force tox and pytest to use color + FORCE_COLOR: true jobs: - nipype-conv: + build: + name: Build & verify package runs-on: ubuntu-latest + permissions: + attestations: write + id-token: write steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Revert version to most recent version tag on upstream update - if: github.event_name == 'repository_dispatch' - run: git checkout $(git tag -l | grep 'v.*' | tail -n 1 | awk -F post '{print $1}') - - - name: Set up Python - uses: actions/setup-python@v5 - - - name: Install build dependencies - run: python -m pip install --upgrade pip - - - name: Install requirements - run: python -m pip install ./related-packages/fileformats -r ./nipype-auto-conv/requirements.txt - - - name: Run automatic Nipype > Pydra conversion - run: ./nipype-auto-conv/generate - - - uses: actions/upload-artifact@v4 - with: - name: converted-nipype - path: pydra/tasks/fsl/auto - - devcheck: - needs: [nipype-conv] - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.8', '3.11'] # Check oldest and newest versions - pip-flags: ['', 
'--editable'] - pydra: - - 'pydra' - - '--editable git+https://github.com/nipype/pydra.git#egg=pydra' - - steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Revert version to most recent version tag on upstream update - if: github.event_name == 'repository_dispatch' - run: git checkout $(git tag -l | grep 'v.*' | tail -n 1 | awk -F post '{print $1}') - - - name: Download tasks converted from Nipype - uses: actions/download-artifact@v4 - with: - name: converted-nipype - path: pydra/tasks/fsl/auto - - - name: Strip auto package from gitignore so it is included in package - run: | - sed -i '/\/pydra\/tasks\/fsl\/auto/d' .gitignore - sed -i '/^_version.py/d' .gitignore - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install build dependencies - run: | - python -m pip install --upgrade pip - - - name: Install Pydra - run: | - pushd $HOME - pip install ${{ matrix.pydra }} - popd - python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - - - name: Install task package - run: | - pip install ${{ matrix.pip-flags }} "./related-packages/fileformats[dev]" - pip install ${{ matrix.pip-flags }} "related-packages/fileformats-extras[dev]" - pip install ${{ matrix.pip-flags }} ".[dev]" - python -c "import pydra.tasks.fsl as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import fileformats.medimage_fsl as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import fileformats.extras.medimage_fsl as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: hynek/build-and-inspect-python-package@v2 + with: + attest-build-provenance-github: ${{ github.event_name != 'pull_request' }} test: - needs: [nipype-conv] runs-on: ubuntu-latest strategy: 
matrix: - python-version: ['3.8', '3.11'] + python-version: ['3.11', '3.13'] + fail-fast: false + steps: - name: Removed unnecessary tools to free space @@ -173,38 +107,49 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Install tox + run: | + uv tool install tox --with=tox-uv --with=tox-gh-actions + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + fail_ci_if_error: false + token: ${{ secrets.CODECOV_TOKEN }} + + fileformats-test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.11', '3.13'] + steps: + - uses: actions/checkout@v3 + - name: Revert version to most recent tag on upstream update + if: github.event_name == 'repository_dispatch' + run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} - name: Install build dependencies run: | python -m pip install --upgrade pip - - name: Install task package run: | - pip install "./related-packages/fileformats" "./related-packages/fileformats-extras" ".[test]" - python -c "import pydra.tasks.fsl as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - - - name: Test with pytest + pip install "./related-packages/fileformats[test]" "./related-packages/fileformats-extras[test]" + python -c "import fileformats.medimage_fsl as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" + - name: Test fileformats with pytest run: >- - pytest -sv - ./pydra/tasks/fsl - ./related-packages/fileformats - ./related-packages/fileformats-extras - --cov pydra.tasks.fsl - --cov fileformats.medimage_fsl - --cov 
fileformats.extras.medimage_fsl - --cov-report xml - - - name: Upload to CodeCov - uses: codecov/codecov-action@v4 - if: ${{ always() }} - with: - files: coverage.xml - name: pydra-fsl - + pytest ./related-packages -sv --cov fileformats.medimage_fsl + --cov fileformats.extras.medimage_fsl --cov-report xml . deploy-fileformats: - needs: [devcheck, test] + needs: [build, test, fileformats-test] runs-on: ubuntu-latest steps: @@ -282,184 +227,20 @@ jobs: packages-dir: ./related-packages/fileformats-extras/dist deploy: - needs: [nipype-conv, test, deploy-fileformats, deploy-fileformats-extras] + needs: [build, test] runs-on: ubuntu-latest + if: github.event_name == 'release' + permissions: + attestations: write + id-token: write steps: - - - name: Checkout repository - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - - name: Set up Git user - run: | - git config --local user.email "action@github.com" - git config --local user.name "GitHub Action" - - - name: Get latest version tag - id: latest_tag - run: | - git fetch --tags - echo "TAG=$(git tag -l | grep 'v.*' | tail -n 1 | awk -F post '{print $1}')" >> $GITHUB_OUTPUT - - - name: Revert to latest tag - if: github.event_name == 'repository_dispatch' - run: git checkout ${{ steps.latest_tag.outputs.TAG }} - - - name: Download tasks converted from Nipype + - name: Download dist uses: actions/download-artifact@v4 with: - name: converted-nipype - path: pydra/tasks/fsl/auto - - - name: Show the contents of the auto-generated tasks - run: tree pydra - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Install build tools - run: python -m pip install build twine - - - name: Strip auto package from gitignore so it is included in package - run: | - sed -i '/\/pydra\/tasks\/fsl\/auto/d' .gitignore - cat .gitignore - - - name: Install task package to calculate post-release tag - run: | - pip install "./related-packages/fileformats" 
"./related-packages/fileformats-extras" ".[test]" - - - name: Generate post-release tag based on Nipype and Nipype2Pydra versions - id: post_release_tag - run: | - POST=$(python -c "from pydra.tasks.fsl.auto._post_release import *; print(post_release)") - echo "TAG=${{ steps.latest_tag.outputs.TAG }}post${POST}" >> $GITHUB_OUTPUT - - - name: Add auto directory to git repo - if: github.event_name == 'release' || github.event_name == 'repository_dispatch' - run: | - git add pydra/tasks/fsl/auto - git commit -am"added auto-generated version to make new tag for package version" - git status - - - name: Overwrite the tag of release event with latest commit (i.e. including the auto directory) - if: github.event_name == 'release' - run: | - git tag -d ${{ steps.latest_tag.outputs.TAG }}; - git tag ${{ steps.latest_tag.outputs.TAG }}; - - - name: Tag repo with the post-release - if: github.event_name == 'repository_dispatch' - run: git tag ${{ steps.post_release_tag.outputs.TAG }} - - - name: Build source and wheel distributions - run: python -m build . 
- - - name: Check distributions - run: twine check dist/* - - - uses: actions/upload-artifact@v4 - with: - name: distributions - path: dist/ - - - name: Check for PyPI token on tag - id: deployable - if: github.event_name == 'release' || github.event_name == 'repository_dispatch' - env: - PYPI_API_TOKEN: "${{ secrets.PYPI_API_TOKEN }}" - run: if [ -n "$PYPI_API_TOKEN" ]; then echo "DEPLOY=true" >> $GITHUB_OUTPUT; fi - + name: Packages + path: dist - name: Upload to PyPI - if: steps.deployable.outputs.DEPLOY uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - - - name: Create post-release release for releases triggered by nipype2pydra dispatches - if: steps.deployable.outputs.DEPLOY && github.event_name == 'repository_dispatch' - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token - with: - tag_name: ${{ steps.post_release_tag.outputs.TAG }} - release_name: Release ${{ steps.post_release_tag.outputs.TAG }} - draft: false - prerelease: false - - # docs: - # needs: deploy - # environment: - # name: github-pages - # url: ${{ steps.deployment.outputs.page_url }} - # runs-on: ubuntu-latest - # steps: - # - uses: actions/checkout@v4 - # - uses: actions/setup-python@v5 - # with: - # python-version: '3.x' - - # - name: Download tasks converted from Nipype - # uses: actions/download-artifact@v4 - # with: - # name: converted-nipype - # path: pydra/tasks/freesurfer/auto - - # - name: Install dependencies - # run: python -m pip install related-packages/fileformats .[doc] - - # - name: Build docs - # run: | - # pushd docs - # make html - # popd - - # - name: Upload artifact - # uses: actions/upload-pages-artifact@v3 - # with: - # path: 'docs/build/html' - - # - name: Setup GitHub Pages - # if: github.event_name == 'release' || github.event_name == 'repository_dispatch' - # uses: actions/configure-pages@v4 - - 
# - name: Deploy to GitHub Pages - # if: github.event_name == 'release' || github.event_name == 'repository_dispatch' - # id: deployment - # uses: actions/deploy-pages@v4 - - - # report_progress: - # needs: [deploy] - # runs-on: ubuntu-latest - # steps: - - # - name: Checkout - # uses: actions/checkout@v4 - - # - name: Generate progress report - # id: generate-report - # run: | - # tools/report_progress.py outputs/progress-report.json - # echo "progress_report=$(cat outputs/progress-report.json)" >> $GITHUB_OUTPUT - - # - name: Report progress to Nipype2Pydra repo - # if: github.event_name == 'release' || github.event_name == 'repository_dispatch' - # run: >- - # curl -XPOST -u "${{ env.REPORT_PROGRESS_PAT }}" -H "Accept: application/vnd.github.everest-preview+json" - # "https://api.github.com/repos/nipype/pydra-fsl/dispatches" - # -d '{ - # "event_type": "progress-report", - # "client_payload": ${{ steps.generate-report.output.progress_report }} - # }' - # env: - # PAT: ${{ env.REPORT_PROGRESS_PAT }} - # Deploy on tags if PYPI_API_TOKEN is defined in the repository secrets. 
# Secrets are not accessible in the if: condition [0], so set an output variable [1] diff --git a/nipype-auto-conv/generate.py b/nipype-auto-conv/generate.py new file mode 100644 index 0000000..7dc1449 --- /dev/null +++ b/nipype-auto-conv/generate.py @@ -0,0 +1,20 @@ +from pathlib import Path +from click.testing import CliRunner +from nipype2pydra.utils import show_cli_trace +from nipype2pydra.cli import convert + +PKG_PATH = Path(__file__).parent.parent.absolute() + +runner = CliRunner() + + +result = runner.invoke( + convert, + [ + str(PKG_PATH / "nipype-auto-conv" / "specs"), + str(PKG_PATH), + ], + catch_exceptions=False, +) + +assert not result.exit_code, show_cli_trace(result) diff --git a/nipype-auto-conv/specs/interfaces/accuracy_tester.yaml b/nipype-auto-conv/specs/interfaces/accuracy_tester.yaml index b9facd6..ef7b6e9 100644 --- a/nipype-auto-conv/specs/interfaces/accuracy_tester.yaml +++ b/nipype-auto-conv/specs/interfaces/accuracy_tester.yaml @@ -6,8 +6,8 @@ # Docs # ---- # -# Test the accuracy of an existing training dataset on a set of hand-labelled subjects. -# Note: This may or may not be working. Couldn't presently not confirm because fix fails on this (even outside of nipype) without leaving an error msg. +# Test the accuracy of an existing training dataset on a set of hand-labelled subjects. +# Note: This may or may not be working. Couldn't presently not confirm because fix fails on this (even outside of nipype) without leaving an error msg. # task_name: AccuracyTester nipype_name: AccuracyTester @@ -18,17 +18,15 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + mel_icas: generic/directory+list-of + # type=inputmultiobject|default=[]: Melodic output directories trained_wts_file: generic/file # type=file|default=: trained-weights file - mel_icas: medimage-fsl/melodic-ica+list-of - # type=inputmultiobject|default=[]: Melodic output directories - output_directory: Path - # type=directory: Path to folder in which to store the results of the accuracy test. callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -40,11 +38,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. output_directory: generic/directory # type=directory: Path to folder in which to store the results of the accuracy test. # type=directory|default=: Path to folder in which to store the results of the accuracy test. @@ -52,37 +50,37 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - mel_icas: - # type=inputmultiobject|default=[]: Melodic output directories - trained_wts_file: - # type=file|default=: trained-weights file - output_directory: - # type=directory: Path to folder in which to store the results of the accuracy test. - # type=directory|default=: Path to folder in which to store the results of the accuracy test. 
- args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + mel_icas: + # type=inputmultiobject|default=[]: Melodic output directories + trained_wts_file: + # type=file|default=: trained-weights file + output_directory: + # type=directory: Path to folder in which to store the results of the accuracy test. + # type=directory|default=: Path to folder in which to store the results of the accuracy test. 
+ args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/accuracy_tester_callables.py b/nipype-auto-conv/specs/interfaces/accuracy_tester_callables.py deleted file mode 100644 index d6e4893..0000000 --- a/nipype-auto-conv/specs/interfaces/accuracy_tester_callables.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of AccuracyTester.yaml""" - -import attrs -from fileformats.generic import Directory - - -def output_directory_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["output_directory"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L251 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = 
{} - if inputs.output_directory is not attrs.NOTHING: - outputs["output_directory"] = Directory( - exists=False, value=inputs.output_directory - ) - else: - outputs["output_directory"] = Directory(exists=False, value="accuracy_test") - return outputs diff --git a/nipype-auto-conv/specs/interfaces/apply_mask.yaml b/nipype-auto-conv/specs/interfaces/apply_mask.yaml index 13d115d..d1d8e28 100644 --- a/nipype-auto-conv/specs/interfaces/apply_mask.yaml +++ b/nipype-auto-conv/specs/interfaces/apply_mask.yaml @@ -15,18 +15,15 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. - mask_file: medimage/mask+nifti-gz - # type=file|default=: binary image defining mask space - in_file: medimage/nifti-gz + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
+ in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write + mask_file: generic/file + # type=file|default=: binary image defining mask space callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -38,60 +35,60 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. - out_file: medimage/nifti-gz + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
+ out_file: generic/file # type=file: image written after calculations # type=file|default=: image to write callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields - out_file: out_file.nii.gz + # dict[str, str] - `path_template` values to be provided to output fields + out_file: out_file # type=file: image written after calculations # type=file|default=: image to write requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - mask_file: - # type=file|default=: binary image defining mask space - in_file: - # type=file|default=: image to operate on - out_file: - # type=file: image written after calculations - # type=file|default=: image to write - internal_datatype: - # type=enum|default='float'|allowed['char','double','float','input','int','short']: datatype to use for calculations (default is float) - output_datatype: - # type=enum|default='float'|allowed['char','double','float','input','int','short']: datatype to use for output (default uses input type) - nan2zeros: - # type=bool|default=False: change NaNs to zeros before doing anything - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, 
noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + mask_file: + # type=file|default=: binary image defining mask space + in_file: + # type=file|default=: image to operate on + out_file: + # type=file: image written after calculations + # type=file|default=: image to write + internal_datatype: + # type=enum|default='float'|allowed['char','double','float','input','int','short']: datatype to use for calculations (default is float) + output_datatype: + # type=enum|default='float'|allowed['char','double','float','input','int','short']: datatype to use for output (default uses input type) + nan2zeros: + # type=bool|default=False: change NaNs to zeros before doing anything + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for 
time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/apply_mask_callables.py b/nipype-auto-conv/specs/interfaces/apply_mask_callables.py deleted file mode 100644 index fcd5f27..0000000 --- a/nipype-auto-conv/specs/interfaces/apply_mask_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ApplyMask.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. 
- If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/apply_topup.yaml b/nipype-auto-conv/specs/interfaces/apply_topup.yaml index e74a9e4..515b4fc 100644 --- a/nipype-auto-conv/specs/interfaces/apply_topup.yaml +++ b/nipype-auto-conv/specs/interfaces/apply_topup.yaml @@ -6,27 +6,27 @@ # Docs # ---- # -# Interface for FSL topup, a tool for estimating and correcting -# susceptibility induced distortions. -# `General reference -# `_ -# and `use example -# `_. +# Interface for FSL topup, a tool for estimating and correcting +# susceptibility induced distortions. +# `General reference +# `_ +# and `use example +# `_. 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import ApplyTOPUP -# >>> applytopup = ApplyTOPUP() -# >>> applytopup.inputs.in_files = ["epi.nii", "epi_rev.nii"] -# >>> applytopup.inputs.encoding_file = "topup_encoding.txt" -# >>> applytopup.inputs.in_topup_fieldcoef = "topup_fieldcoef.nii.gz" -# >>> applytopup.inputs.in_topup_movpar = "topup_movpar.txt" -# >>> applytopup.inputs.output_type = "NIFTI_GZ" -# >>> applytopup.cmdline # doctest: +ELLIPSIS -# 'applytopup --datain=topup_encoding.txt --imain=epi.nii,epi_rev.nii --inindex=1,2 --topup=topup --out=epi_corrected.nii.gz' -# >>> res = applytopup.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import ApplyTOPUP +# >>> applytopup = ApplyTOPUP() +# >>> applytopup.inputs.in_files = ["epi.nii", "epi_rev.nii"] +# >>> applytopup.inputs.encoding_file = "topup_encoding.txt" +# >>> applytopup.inputs.in_topup_fieldcoef = "topup_fieldcoef.nii.gz" +# >>> applytopup.inputs.in_topup_movpar = "topup_movpar.txt" +# >>> applytopup.inputs.output_type = "NIFTI_GZ" +# >>> applytopup.cmdline # doctest: +ELLIPSIS +# 'applytopup --datain=topup_encoding.txt --imain=epi.nii,epi_rev.nii --inindex=1,2 --topup=topup --out=epi_corrected.nii.gz' +# >>> res = applytopup.run() # doctest: +SKIP # # task_name: ApplyTOPUP @@ -43,17 +43,14 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- encoding_file: text/text-file + encoding_file: generic/file # type=file|default=: name of text file with PE directions/times in_files: medimage/nifti1+list-of # type=inputmultiobject|default=[]: name of file with images in_topup_fieldcoef: medimage/nifti-gz # type=file|default=: topup file containing the field coefficients - in_topup_movpar: text/text-file + in_topup_movpar: generic/file # type=file|default=: topup movpar.txt file - out_corrected: Path - # type=file: name of 4D image file with unwarped images - # type=file|default=: output (warped) image callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -77,7 +74,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -104,13 +101,13 @@ tests: datatype: # type=enum|default='char'|allowed['char','double','float','int','short']: force output data type output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys 
expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -129,16 +126,12 @@ tests: # (if not specified, will try to choose a sensible value) in_files: # type=inputmultiobject|default=[]: name of file with images - encoding_file: - # type=file|default=: name of text file with PE directions/times in_topup_fieldcoef: # type=file|default=: topup file containing the field coefficients - in_topup_movpar: - # type=file|default=: topup movpar.txt file output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -161,16 +154,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_files: '["epi.nii", "epi_rev.nii"]' # type=inputmultiobject|default=[]: name of file with images - encoding_file: '"topup_encoding.txt"' - # type=file|default=: name of text file with PE directions/times in_topup_fieldcoef: '"topup_fieldcoef.nii.gz"' # type=file|default=: topup file containing the field coefficients - in_topup_movpar: '"topup_movpar.txt"' - # type=file|default=: topup movpar.txt file output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/apply_topup_callables.py b/nipype-auto-conv/specs/interfaces/apply_topup_callables.py deleted file mode 100644 index 6ae5fc5..0000000 --- a/nipype-auto-conv/specs/interfaces/apply_topup_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ApplyTOPUP.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_corrected_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_corrected"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait 
should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of 
/interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original 
source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/apply_warp.yaml b/nipype-auto-conv/specs/interfaces/apply_warp.yaml index f0630ac..10ec47c 100644 --- a/nipype-auto-conv/specs/interfaces/apply_warp.yaml +++ b/nipype-auto-conv/specs/interfaces/apply_warp.yaml @@ -7,15 +7,15 @@ # ---- # FSL's applywarp wrapper to apply the results of a FNIRT registration # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> aw = fsl.ApplyWarp() -# >>> aw.inputs.in_file = example_data('structural.nii') -# >>> aw.inputs.ref_file = example_data('mni.nii') -# >>> aw.inputs.field_file = 'my_coefficients_filed.nii' #doctest: +SKIP -# >>> res = aw.run() #doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> aw = fsl.ApplyWarp() +# >>> aw.inputs.in_file = example_data('structural.nii') +# >>> aw.inputs.ref_file = example_data('mni.nii') +# >>> aw.inputs.field_file = 'my_coefficients_filed.nii' #doctest: +SKIP +# >>> res = aw.run() #doctest: +SKIP # # # @@ -28,26 +28,23 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. - in_file: medimage/nifti-gz - # type=file|default=: image to be warped - ref_file: medimage/nifti-gz - # type=file|default=: reference image - field_file: medimage/nifti-gz + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + field_file: generic/file # type=file|default=: file containing warp field - premat: Path - # type=file|default=: filename for pre-transform (affine matrix) - postmat: Path - # type=file|default=: filename for post-transform (affine matrix) - mask_file: Path + in_file: generic/file + # type=file|default=: image to be warped + mask_file: generic/file # type=file|default=: filename for mask image (in reference space) - out_file: Path - # type=file: Warped output file - # type=file|default=: output filename + postmat: generic/file + # type=file|default=: filename for post-transform (affine matrix) + premat: generic/file + # type=file|default=: filename for pre-transform (affine matrix) + ref_file: generic/file + # type=file|default=: reference image callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -59,11 +56,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 
'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_file: generic/file # type=file: Warped output file # type=file|default=: output filename @@ -71,62 +68,62 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: Warped output file # type=file|default=: output filename requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: image to be warped - out_file: - # type=file: Warped output file - # type=file|default=: output filename - ref_file: - # type=file|default=: reference image - field_file: - # type=file|default=: file containing warp field - abswarp: - # type=bool|default=False: treat warp field as absolute: x' = w(x) - relwarp: - # type=bool|default=False: treat warp field as relative: x' = x + w(x) - datatype: - # 
type=enum|default='char'|allowed['char','double','float','int','short']: Force output data type [char short int float double]. - supersample: - # type=bool|default=False: intermediary supersampling of output, default is off - superlevel: - # type=traitcompound|default=None: level of intermediary supersampling, a for 'automatic' or integer level. Default = 2 - premat: - # type=file|default=: filename for pre-transform (affine matrix) - postmat: - # type=file|default=: filename for post-transform (affine matrix) - mask_file: - # type=file|default=: filename for mask image (in reference space) - interp: - # type=enum|default='nn'|allowed['nn','sinc','spline','trilinear']: interpolation method - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: image to be warped + out_file: + # type=file: Warped output file + # type=file|default=: output filename + ref_file: + # type=file|default=: reference image + field_file: + # type=file|default=: file containing warp field + abswarp: + # type=bool|default=False: treat warp field as absolute: x' = w(x) + relwarp: + # type=bool|default=False: treat warp field as relative: x' = x + w(x) + datatype: + # type=enum|default='char'|allowed['char','double','float','int','short']: Force output data type [char short int float double]. + supersample: + # type=bool|default=False: intermediary supersampling of output, default is off + superlevel: + # type=traitcompound|default=None: level of intermediary supersampling, a for 'automatic' or integer level. 
Default = 2 + premat: + # type=file|default=: filename for pre-transform (affine matrix) + postmat: + # type=file|default=: filename for post-transform (affine matrix) + mask_file: + # type=file|default=: filename for mask image (in reference space) + interp: + # type=enum|default='nn'|allowed['nn','sinc','spline','trilinear']: interpolation method + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/apply_warp_callables.py b/nipype-auto-conv/specs/interfaces/apply_warp_callables.py deleted file mode 100644 index 6d2586d..0000000 --- a/nipype-auto-conv/specs/interfaces/apply_warp_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ApplyWarp.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1494 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. 
- (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "applywarp" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1486 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix="_warp", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs["out_file"] = os.path.abspath(inputs.out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/apply_xfm.yaml b/nipype-auto-conv/specs/interfaces/apply_xfm.yaml index 2504b98..372b5c3 100644 --- a/nipype-auto-conv/specs/interfaces/apply_xfm.yaml +++ b/nipype-auto-conv/specs/interfaces/apply_xfm.yaml @@ -6,23 +6,23 @@ # Docs # ---- # Currently just a light wrapper around FLIRT, -# with no modifications +# with no modifications # -# ApplyXFM is used to apply an existing transform to an image +# ApplyXFM is used to apply an existing transform to an image # # -# Examples -# -------- +# Examples +# -------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> from nipype.testing import example_data -# >>> applyxfm = fsl.preprocess.ApplyXFM() -# >>> applyxfm.inputs.in_file = example_data('structural.nii') -# >>> applyxfm.inputs.in_matrix_file = example_data('trans.mat') -# >>> applyxfm.inputs.out_file = 'newfile.nii' -# >>> applyxfm.inputs.reference = example_data('mni.nii') -# >>> applyxfm.inputs.apply_xfm = True -# >>> result = applyxfm.run() # doctest: +SKIP +# >>> import nipype.interfaces.fsl as fsl +# >>> from nipype.testing import example_data +# >>> applyxfm 
= fsl.preprocess.ApplyXFM() +# >>> applyxfm.inputs.in_file = example_data('structural.nii') +# >>> applyxfm.inputs.in_matrix_file = example_data('trans.mat') +# >>> applyxfm.inputs.out_file = 'newfile.nii' +# >>> applyxfm.inputs.reference = example_data('mni.nii') +# >>> applyxfm.inputs.apply_xfm = True +# >>> result = applyxfm.run() # doctest: +SKIP # # task_name: ApplyXFM @@ -49,15 +49,6 @@ inputs: # type=file|default=: input 4x4 affine matrix in_weight: generic/file # type=file|default=: File for input weighting volume - out_file: Path - # type=file: path/name of registered file (if generated) - # type=file|default=: registered output file - out_log: Path - # type=file: path/name of output log (if generated) - # type=file|default=: output log - out_matrix_file: Path - # type=file: path/name of calculated affine transform (if generated) - # type=file|default=: output affine matrix in 4x4 asciii format ref_weight: generic/file # type=file|default=: File for reference weighting volume reference: generic/file @@ -99,7 +90,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -204,13 +195,13 @@ tests: bbrslope: # type=float|default=0.0: value of bbr slope output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list 
import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/apply_xfm_callables.py b/nipype-auto-conv/specs/interfaces/apply_xfm_callables.py deleted file mode 100644 index 6b019b9..0000000 --- a/nipype-auto-conv/specs/interfaces/apply_xfm_callables.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ApplyXFM.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def out_log_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_log"] - - -def out_matrix_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_matrix_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is 
not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, 
stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/ar1_image.yaml b/nipype-auto-conv/specs/interfaces/ar1_image.yaml index 4cca722..0001152 100644 --- a/nipype-auto-conv/specs/interfaces/ar1_image.yaml +++ b/nipype-auto-conv/specs/interfaces/ar1_image.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Use fslmaths to generate an AR1 coefficient image across a -# given dimension. (Should use -odt float and probably demean first) +# given dimension. (Should use -odt float and probably demean first) # # task_name: AR1Image @@ -25,9 +25,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -51,7 +48,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -75,13 +72,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/ar1_image_callables.py b/nipype-auto-conv/specs/interfaces/ar1_image_callables.py deleted file mode 100644 index aee1108..0000000 --- a/nipype-auto-conv/specs/interfaces/ar1_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" 
section of AR1Image.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/av_scale.yaml b/nipype-auto-conv/specs/interfaces/av_scale.yaml index 02fe620..d1b8de6 100644 --- a/nipype-auto-conv/specs/interfaces/av_scale.yaml +++ b/nipype-auto-conv/specs/interfaces/av_scale.yaml @@ -7,12 +7,12 @@ # ---- # Use FSL avscale command to extract info from mat file output of FLIRT # -# Examples -# -------- +# Examples +# -------- # -# >>> avscale = AvScale() -# >>> avscale.inputs.mat_file = 'flirt.mat' -# >>> res = avscale.run() # doctest: +SKIP +# >>> avscale = AvScale() +# >>> avscale.inputs.mat_file = 'flirt.mat' +# >>> res = avscale.run() # doctest: +SKIP # # # @@ -60,7 +60,7 @@ outputs: left_right_orientation_preserved: left_right_orientation_preserved_callable # type=bool: True if LR orientation preserved templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -78,7 +78,7 @@ tests: environ: # 
type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/av_scale_callables.py b/nipype-auto-conv/specs/interfaces/av_scale_callables.py deleted file mode 100644 index a26202f..0000000 --- a/nipype-auto-conv/specs/interfaces/av_scale_callables.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of AvScale.yaml""" - - -def average_scaling_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["average_scaling"] - - -def backward_half_transform_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["backward_half_transform"] - - -def determinant_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["determinant"] - - -def forward_half_transform_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["forward_half_transform"] - - -def left_right_orientation_preserved_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["left_right_orientation_preserved"] - - -def rot_angles_callable(output_dir, inputs, stdout, stderr): - outputs = 
_list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["rot_angles"] - - -def rotation_translation_matrix_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["rotation_translation_matrix"] - - -def scales_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["scales"] - - -def skews_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["skews"] - - -def translations_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["translations"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L935 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - return _results diff --git a/nipype-auto-conv/specs/interfaces/b0_calc.yaml b/nipype-auto-conv/specs/interfaces/b0_calc.yaml index edd6ef7..6570e65 100644 --- a/nipype-auto-conv/specs/interfaces/b0_calc.yaml +++ b/nipype-auto-conv/specs/interfaces/b0_calc.yaml @@ -6,21 +6,21 @@ # Docs # ---- # -# B0 inhomogeneities occur at interfaces of materials with different magnetic susceptibilities, -# such as tissue-air interfaces. These differences lead to distortion in the local magnetic field, -# as Maxwell’s equations need to be satisfied. An example of B0 inhomogneity is the first volume -# of the 4D volume ```$FSLDIR/data/possum/b0_ppm.nii.gz```. 
+# B0 inhomogeneities occur at interfaces of materials with different magnetic susceptibilities, +# such as tissue-air interfaces. These differences lead to distortion in the local magnetic field, +# as Maxwell’s equations need to be satisfied. An example of B0 inhomogneity is the first volume +# of the 4D volume ```$FSLDIR/data/possum/b0_ppm.nii.gz```. # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import B0Calc -# >>> b0calc = B0Calc() -# >>> b0calc.inputs.in_file = 'tissue+air_map.nii' -# >>> b0calc.inputs.z_b0 = 3.0 -# >>> b0calc.inputs.output_type = "NIFTI_GZ" -# >>> b0calc.cmdline -# 'b0calc -i tissue+air_map.nii -o tissue+air_map_b0field.nii.gz --chi0=4.000000e-07 -d -9.450000e-06 --extendboundary=1.00 --b0x=0.00 --gx=0.0000 --b0y=0.00 --gy=0.0000 --b0=3.00 --gz=0.0000' +# >>> from nipype.interfaces.fsl import B0Calc +# >>> b0calc = B0Calc() +# >>> b0calc.inputs.in_file = 'tissue+air_map.nii' +# >>> b0calc.inputs.z_b0 = 3.0 +# >>> b0calc.inputs.output_type = "NIFTI_GZ" +# >>> b0calc.cmdline +# 'b0calc -i tissue+air_map.nii -o tissue+air_map_b0field.nii.gz --chi0=4.000000e-07 -d -9.450000e-06 --extendboundary=1.00 --b0x=0.00 --gx=0.0000 --b0y=0.00 --gy=0.0000 --b0=3.00 --gz=0.0000' # # task_name: B0Calc @@ -39,9 +39,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: medimage/nifti1 # type=file|default=: filename of input image (usually a tissue/air segmentation) - out_file: Path - # type=file: filename of B0 output volume - # type=file|default=: filename of B0 output volume callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -65,7 +62,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -102,13 +99,13 @@ tests: directconv: # type=bool|default=False: use direct (image space) convolution, not FFT output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -127,12 +124,10 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: filename of input image (usually a tissue/air segmentation) - z_b0: '3.0' - # type=float|default=1.0: Value for zeroth-order b0 field 
(z-component), in Tesla output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -155,12 +150,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"tissue+air_map.nii"' # type=file|default=: filename of input image (usually a tissue/air segmentation) - z_b0: '3.0' - # type=float|default=1.0: Value for zeroth-order b0 field (z-component), in Tesla output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/b0_calc_callables.py b/nipype-auto-conv/specs/interfaces/b0_calc_callables.py deleted file mode 100644 index 1a08f4f..0000000 --- a/nipype-auto-conv/specs/interfaces/b0_calc_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of B0Calc.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - 
"name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/bedpostx5.yaml b/nipype-auto-conv/specs/interfaces/bedpostx5.yaml index 4ef3d5b..49b85ba 100644 --- a/nipype-auto-conv/specs/interfaces/bedpostx5.yaml +++ b/nipype-auto-conv/specs/interfaces/bedpostx5.yaml @@ -6,27 +6,27 @@ # Docs # ---- # -# BEDPOSTX stands for Bayesian Estimation of Diffusion Parameters Obtained -# using Sampling Techniques. The X stands for modelling Crossing Fibres. -# bedpostx runs Markov Chain Monte Carlo sampling to build up distributions -# on diffusion parameters at each voxel. It creates all the files necessary -# for running probabilistic tractography. For an overview of the modelling -# carried out within bedpostx see this `technical report -# `_. +# BEDPOSTX stands for Bayesian Estimation of Diffusion Parameters Obtained +# using Sampling Techniques. The X stands for modelling Crossing Fibres. +# bedpostx runs Markov Chain Monte Carlo sampling to build up distributions +# on diffusion parameters at each voxel. It creates all the files necessary +# for running probabilistic tractography. For an overview of the modelling +# carried out within bedpostx see this `technical report +# `_. # # -# .. note:: Consider using -# :func:`niflow.nipype1.workflows.fsl.dmri.create_bedpostx_pipeline` instead. +# .. 
note:: Consider using +# :func:`niflow.nipype1.workflows.fsl.dmri.create_bedpostx_pipeline` instead. # # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces import fsl -# >>> bedp = fsl.BEDPOSTX5(bvecs='bvecs', bvals='bvals', dwi='diffusion.nii', -# ... mask='mask.nii', n_fibres=1) -# >>> bedp.cmdline -# 'bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 -s 1 --updateproposalevery=40' +# >>> from nipype.interfaces import fsl +# >>> bedp = fsl.BEDPOSTX5(bvecs='bvecs', bvals='bvals', dwi='diffusion.nii', +# ... mask='mask.nii', n_fibres=1) +# >>> bedp.cmdline +# 'bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 -s 1 --updateproposalevery=40' # # task_name: BEDPOSTX5 @@ -38,11 +38,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. bvals: medimage/bval # type=file|default=: b values file bvecs: medimage/bvec @@ -68,17 +68,17 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. dyads: generic/file+list-of # type=outputmultiobject: Mean of PDD distribution in vector form. dyads_dispersion: generic/file+list-of # type=outputmultiobject: Dispersion mean_S0samples: generic/file - # type=file: Mean of distribution on T2wbaseline signal intensity S0 + # type=file: Mean of distribution on T2w baseline signal intensity S0 mean_dsamples: generic/file # type=file: Mean of distribution on diffusivity d mean_fsamples: generic/file+list-of @@ -97,135 +97,135 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - dwi: - # type=file|default=: diffusion weighted image data file - mask: - # type=file|default=: bet binary mask file - bvecs: - # type=file|default=: b vectors file - bvals: - # type=file|default=: b values file - 
logdir: - # type=directory|default=: - n_fibres: - # type=range|default=2: Maximum number of fibres to fit in each voxel - model: - # type=enum|default=1|allowed[1,2,3]: use monoexponential (1, default, required for single-shell) or multiexponential (2, multi-shell) model - fudge: - # type=int|default=0: ARD fudge factor - n_jumps: - # type=int|default=5000: Num of jumps to be made by MCMC - burn_in: - # type=range|default=0: Total num of jumps at start of MCMC to be discarded - sample_every: - # type=range|default=1: Num of jumps for each sample (MCMC) - out_dir: - # type=directory|default='bedpostx': output directory - gradnonlin: - # type=bool|default=False: consider gradient nonlinearities, default off - grad_dev: - # type=file|default=: grad_dev file, if gradnonlin, -g is True - use_gpu: - # type=bool|default=False: Use the GPU version of bedpostx - burn_in_no_ard: - # type=range|default=0: num of burnin jumps before the ard is imposed - update_proposal_every: - # type=range|default=40: Num of jumps for each update to the proposal density std (MCMC) - seed: - # type=int|default=0: seed for pseudo random number generator - no_ard: - # type=bool|default=False: Turn ARD off on all fibres - all_ard: - # type=bool|default=False: Turn ARD on on all fibres - no_spat: - # type=bool|default=False: Initialise with tensor, not spatially - non_linear: - # type=bool|default=False: Initialise with nonlinear fitting - cnlinear: - # type=bool|default=False: Initialise with constrained nonlinear fitting - rician: - # type=bool|default=False: use Rician noise modeling - f0_noard: - # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 - f0_ard: - # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 - force_dir: - # type=bool|default=True: use the actual directory name given (do not add + to make a new directory) - output_type: - # 
type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - bvecs: - # type=file|default=: b vectors file - bvals: - # type=file|default=: b values file - dwi: - # type=file|default=: diffusion weighted image data file - mask: - # type=file|default=: bet binary mask file - n_fibres: "1" - # type=range|default=2: Maximum number of fibres to fit in each voxel - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + dwi: + # type=file|default=: diffusion weighted image data file + mask: + # type=file|default=: bet binary mask file + bvecs: + # type=file|default=: b vectors file + bvals: + # type=file|default=: b values file + logdir: + # type=directory|default=: + n_fibres: + # type=range|default=2: Maximum number of fibres to fit in each voxel + model: + # type=enum|default=1|allowed[1,2,3]: use monoexponential (1, default, required for single-shell) or multiexponential (2, multi-shell) model + fudge: + # type=int|default=0: ARD fudge factor + n_jumps: + # type=int|default=5000: Num of jumps to be made by MCMC + burn_in: + # type=range|default=0: Total num of jumps at start of MCMC to be discarded + sample_every: + # type=range|default=1: Num of jumps for each sample (MCMC) + out_dir: + # type=directory|default='bedpostx': output directory + gradnonlin: + # type=bool|default=False: consider gradient nonlinearities, default off + grad_dev: + # type=file|default=: grad_dev file, if gradnonlin, -g is True + use_gpu: + # type=bool|default=False: Use the GPU version of bedpostx + burn_in_no_ard: + # type=range|default=0: num of burnin jumps before the ard is imposed + update_proposal_every: + # type=range|default=40: Num of jumps for each update to the proposal density std (MCMC) + seed: + # type=int|default=0: seed for pseudo random number generator + no_ard: + # type=bool|default=False: Turn ARD off on all fibres + all_ard: + # type=bool|default=False: Turn ARD on on all fibres + no_spat: + # type=bool|default=False: Initialise with tensor, not spatially + non_linear: + # type=bool|default=False: Initialise with nonlinear fitting + cnlinear: + # type=bool|default=False: Initialise with constrained nonlinear fitting + rician: + # 
type=bool|default=False: use Rician noise modeling + f0_noard: + # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 + f0_ard: + # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 + force_dir: + # type=bool|default=True: use the actual directory name given (do not add + to make a new directory) + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + bvecs: + # type=file|default=: b vectors file + bvals: + # type=file|default=: b values file + dwi: + # type=file|default=: diffusion weighted image data file + mask: + # type=file|default=: bet binary mask file + n_fibres: '1' + # type=range|default=2: Maximum number of fibres to fit in each voxel + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: - - cmdline: bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 -s 1 --updateproposalevery=40 - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. 
- bvecs: '"bvecs"' - # type=file|default=: b vectors file - bvals: '"bvals"' - # type=file|default=: b values file - dwi: '"diffusion.nii"' - # type=file|default=: diffusion weighted image data file - mask: '"mask.nii"' - # type=file|default=: bet binary mask file - n_fibres: "1" - # type=range|default=2: Maximum number of fibres to fit in each voxel - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 -s 1 --updateproposalevery=40 + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. + bvecs: '"bvecs"' + # type=file|default=: b vectors file + bvals: '"bvals"' + # type=file|default=: b values file + dwi: '"diffusion.nii"' + # type=file|default=: diffusion weighted image data file + mask: '"mask.nii"' + # type=file|default=: bet binary mask file + n_fibres: '1' + # type=range|default=2: Maximum number of fibres to fit in each voxel + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/bedpostx5_callables.py b/nipype-auto-conv/specs/interfaces/bedpostx5_callables.py deleted file mode 100644 index 8525bc4..0000000 --- a/nipype-auto-conv/specs/interfaces/bedpostx5_callables.py +++ /dev/null @@ -1,481 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of BEDPOSTX5.yaml""" - -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def dyads_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dyads"] - - -def dyads_dispersion_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dyads_dispersion"] - - -def mean_S0samples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_S0samples"] - - -def mean_dsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_dsamples"] - - -def mean_fsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_fsamples"] - - -def mean_phsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_phsamples"] - - -def mean_thsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_thsamples"] - - -def merged_fsamples_callable(output_dir, inputs, stdout, stderr): - outputs = 
_list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["merged_fsamples"] - - -def merged_phsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["merged_phsamples"] - - -def merged_thsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["merged_thsamples"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "bedpostx" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L483 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - n_fibres = inputs.n_fibres - - multi_out = [ - "merged_thsamples", - "merged_fsamples", - "merged_phsamples", - "mean_phsamples", - "mean_thsamples", - "mean_fsamples", - "dyads_dispersion", - "dyads", - ] - - single_out = ["mean_dsamples", "mean_S0samples"] - - for k in single_out: - outputs[k] = _gen_fname( - k, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - for k in multi_out: - outputs[k] = [] - - for i in range(1, n_fibres + 1): - outputs["merged_thsamples"].append( - _gen_fname( - "merged_th%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["merged_fsamples"].append( - _gen_fname( - "merged_f%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["merged_phsamples"].append( - _gen_fname( - "merged_ph%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["mean_thsamples"].append( - _gen_fname( - "mean_th%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["mean_phsamples"].append( - _gen_fname( - "mean_ph%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["mean_fsamples"].append( - _gen_fname( - "mean_f%dsamples" % i, - cwd=_out_dir, - inputs=inputs, - 
stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["dyads"].append( - _gen_fname( - "dyads%d" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["dyads_dispersion"].append( - _gen_fname( - "dyads%d_dispersion" % i, - cwd=_out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/bet.yaml b/nipype-auto-conv/specs/interfaces/bet.yaml index 40c9a43..2d32ef9 100644 --- a/nipype-auto-conv/specs/interfaces/bet.yaml +++ b/nipype-auto-conv/specs/interfaces/bet.yaml @@ -7,19 +7,19 @@ # ---- # FSL BET wrapper for skull stripping # -# For complete details, see the `BET Documentation. -# `_ +# For complete details, see the `BET Documentation. +# `_ # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> btr = fsl.BET() -# >>> btr.inputs.in_file = 'structural.nii' -# >>> btr.inputs.frac = 0.7 -# >>> btr.inputs.out_file = 'brain_anat.nii' -# >>> btr.cmdline -# 'bet structural.nii brain_anat.nii -f 0.70' -# >>> res = btr.run() # doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> btr = fsl.BET() +# >>> btr.inputs.in_file = 'structural.nii' +# >>> btr.inputs.frac = 0.7 +# >>> btr.inputs.out_file = 'brain_anat.nii' +# >>> btr.cmdline +# 'bet structural.nii brain_anat.nii -f 0.70' +# >>> res = btr.run() # doctest: +SKIP # # task_name: BET @@ -38,9 +38,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: medimage/nifti1 # type=file|default=: input file to skull strip - out_file: Path - # type=file: path/name of skullstripped file (if generated) - # type=file|default=: name of output skull stripped image t2_guided: generic/file # type=file|default=: as with creating surfaces, when also feeding in non-brain-extracted T2 (includes registrations) callable_defaults: @@ -88,7 +85,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"brain_anat.nii"' # type=file: path/name of skullstripped file (if generated) # type=file|default=: name of output skull stripped image @@ -138,13 +135,13 @@ tests: reduce_bias: # type=bool|default=False: bias field and neck cleanup output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -163,13 +160,11 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: input file to skull strip - frac: '0.7' - # type=float|default=0.0: fractional intensity threshold out_file: '"brain_anat.nii"' # type=file: path/name 
of skullstripped file (if generated) # type=file|default=: name of output skull stripped image imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -184,7 +179,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: bet structural.nii brain_anat.nii -f 0.70 +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -192,13 +187,11 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"structural.nii"' # type=file|default=: input file to skull strip - frac: '0.7' - # type=float|default=0.0: fractional intensity threshold out_file: '"brain_anat.nii"' # type=file: path/name of skullstripped file (if generated) # type=file|default=: name of output skull stripped image imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/bet_callables.py b/nipype-auto-conv/specs/interfaces/bet_callables.py deleted file mode 100644 index 1e2b1c3..0000000 --- a/nipype-auto-conv/specs/interfaces/bet_callables.py +++ /dev/null @@ -1,524 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of BET.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def inskull_mask_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["inskull_mask_file"] - - -def inskull_mesh_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["inskull_mesh_file"] - - -def mask_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mask_file"] - - -def meshfile_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["meshfile"] - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def outline_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["outline_file"] - - -def outskin_mask_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["outskin_mask_file"] - - -def 
outskin_mesh_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["outskin_mesh_file"] - - -def outskull_mask_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["outskull_mask_file"] - - -def outskull_mesh_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["outskull_mesh_file"] - - -def skull_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["skull_file"] - - -def skull_mask_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["skull_mask_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L232 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. 
(defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "bet" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L176 of /interfaces/fsl/preprocess.py -def _gen_outfilename(inputs=None, stdout=None, stderr=None, output_dir=None): - out_file = inputs.out_file - # Generate default output filename if non specified. - if (out_file is attrs.NOTHING) and (inputs.in_file is not attrs.NOTHING): - out_file = _gen_fname( - inputs.in_file, - suffix="_brain", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - # Convert to relative path to prevent BET failure - # with long paths. 
- return op.relpath(out_file, start=output_dir) - return out_file - - -# Original source at L186 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = os.path.abspath( - _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - ) - - basename = os.path.basename(outputs["out_file"]) - cwd = os.path.dirname(outputs["out_file"]) - kwargs = {"basename": basename, "cwd": cwd} - - if ((inputs.mesh is not attrs.NOTHING) and inputs.mesh) or ( - (inputs.surfaces is not attrs.NOTHING) and inputs.surfaces - ): - outputs["meshfile"] = _gen_fname( - suffix="_mesh.vtk", - change_ext=False, - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if ((inputs.mask is not attrs.NOTHING) and inputs.mask) or ( - (inputs.reduce_bias is not attrs.NOTHING) and inputs.reduce_bias - ): - outputs["mask_file"] = _gen_fname( - suffix="_mask", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if (inputs.outline is not attrs.NOTHING) and inputs.outline: - outputs["outline_file"] = _gen_fname( - suffix="_overlay", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if (inputs.surfaces is not attrs.NOTHING) and inputs.surfaces: - outputs["inskull_mask_file"] = _gen_fname( - suffix="_inskull_mask", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["inskull_mesh_file"] = _gen_fname( - suffix="_inskull_mesh", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["outskull_mask_file"] = _gen_fname( - suffix="_outskull_mask", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["outskull_mesh_file"] = _gen_fname( - suffix="_outskull_mesh", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - 
output_dir=output_dir, - ) - outputs["outskin_mask_file"] = _gen_fname( - suffix="_outskin_mask", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["outskin_mesh_file"] = _gen_fname( - suffix="_outskin_mesh", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["skull_mask_file"] = _gen_fname( - suffix="_skull_mask", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if (inputs.skull is not attrs.NOTHING) and inputs.skull: - outputs["skull_file"] = _gen_fname( - suffix="_skull", - **kwargs, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if (inputs.no_output is not attrs.NOTHING) and inputs.no_output: - outputs["out_file"] = attrs.NOTHING - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/binary_maths.yaml b/nipype-auto-conv/specs/interfaces/binary_maths.yaml index 3c32248..26f90ad 100644 --- a/nipype-auto-conv/specs/interfaces/binary_maths.yaml +++ b/nipype-auto-conv/specs/interfaces/binary_maths.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Use fslmaths to perform mathematical operations using a second image or -# a numeric value. +# a numeric value. 
# # task_name: BinaryMaths @@ -27,9 +27,6 @@ inputs: # type=file|default=: image to operate on operand_file: generic/file # type=file|default=: second image to perform operation with - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -53,7 +50,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -81,13 +78,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/binary_maths_callables.py b/nipype-auto-conv/specs/interfaces/binary_maths_callables.py deleted file mode 100644 index ac159de..0000000 --- 
a/nipype-auto-conv/specs/interfaces/binary_maths_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of BinaryMaths.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/change_data_type.yaml b/nipype-auto-conv/specs/interfaces/change_data_type.yaml index 2206fa7..7c84d78 100644 --- a/nipype-auto-conv/specs/interfaces/change_data_type.yaml +++ b/nipype-auto-conv/specs/interfaces/change_data_type.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -70,13 +67,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/change_data_type_callables.py b/nipype-auto-conv/specs/interfaces/change_data_type_callables.py deleted file mode 100644 index 3af76c1..0000000 --- a/nipype-auto-conv/specs/interfaces/change_data_type_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to 
in the "callables" section of ChangeDataType.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/classifier.yaml b/nipype-auto-conv/specs/interfaces/classifier.yaml index 99a202a..c0f7555 100644 --- a/nipype-auto-conv/specs/interfaces/classifier.yaml +++ b/nipype-auto-conv/specs/interfaces/classifier.yaml @@ -6,7 +6,7 @@ # Docs # ---- # -# Classify ICA components using a specific training dataset ( is in the range 0-100, typically 5-20). +# Classify ICA components using a specific training dataset ( is in the range 0-100, typically 5-20). # task_name: Classifier nipype_name: Classifier @@ -22,9 +22,6 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- artifacts_list_file: Path - # type=file: Text file listing which ICs are artifacts; can be the output from classification or can be created manually - # type=file|default=: Text file listing which ICs are artifacts; can be the output from classification or can be created manually mel_ica: generic/directory # type=directory|default=: Melodic output directory or directories trained_wts_file: generic/file @@ -52,7 +49,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -73,7 +70,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/classifier_callables.py b/nipype-auto-conv/specs/interfaces/classifier_callables.py deleted file mode 100644 index 49a89d3..0000000 --- a/nipype-auto-conv/specs/interfaces/classifier_callables.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Classifier.yaml""" - -import os - - -def artifacts_list_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["artifacts_list_file"] - - -# Original 
source at L304 of /interfaces/fsl/fix.py -def _gen_artifacts_list_file( - mel_ica, thresh, inputs=None, stdout=None, stderr=None, output_dir=None -): - _, trained_wts_file = os.path.split(inputs.trained_wts_file) - trained_wts_filestem = trained_wts_file.split(".")[0] - filestem = "fix4melview_" + trained_wts_filestem + "_thr" - - fname = os.path.join(mel_ica, filestem + str(thresh) + ".txt") - return fname - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L312 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["artifacts_list_file"] = _gen_artifacts_list_file( - inputs.mel_ica, - inputs.thresh, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - return outputs diff --git a/nipype-auto-conv/specs/interfaces/cleaner.yaml b/nipype-auto-conv/specs/interfaces/cleaner.yaml index f316007..8177818 100644 --- a/nipype-auto-conv/specs/interfaces/cleaner.yaml +++ b/nipype-auto-conv/specs/interfaces/cleaner.yaml @@ -6,7 +6,7 @@ # Docs # ---- # -# Extract features (for later training and/or classifying) +# Extract features (for later training and/or classifying) # task_name: Cleaner nipype_name: Cleaner @@ -52,7 +52,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -78,7 +78,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements 
required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/cleaner_callables.py b/nipype-auto-conv/specs/interfaces/cleaner_callables.py deleted file mode 100644 index 8a8692c..0000000 --- a/nipype-auto-conv/specs/interfaces/cleaner_callables.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Cleaner.yaml""" - -import os - - -def cleaned_functional_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["cleaned_functional_file"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L376 of /interfaces/fsl/fix.py -def _get_cleaned_functional_filename( - artifacts_list_filename, inputs=None, stdout=None, stderr=None, output_dir=None -): - """extract the proper filename from the first line of the artifacts file""" - artifacts_list_file = open(artifacts_list_filename, "r") - functional_filename, extension = artifacts_list_file.readline().split(".") - artifacts_list_file_path, artifacts_list_filename = os.path.split( - artifacts_list_filename - ) - - return os.path.join(artifacts_list_file_path, functional_filename + "_clean.nii.gz") - - -# Original source at L388 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["cleaned_functional_file"] = _get_cleaned_functional_filename( - inputs.artifacts_list_file, - inputs=inputs, - stdout=stdout, - stderr=stderr, - 
output_dir=output_dir, - ) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/cluster.yaml b/nipype-auto-conv/specs/interfaces/cluster.yaml index 48b618e..405a0d2 100644 --- a/nipype-auto-conv/specs/interfaces/cluster.yaml +++ b/nipype-auto-conv/specs/interfaces/cluster.yaml @@ -7,16 +7,18 @@ # ---- # Uses FSL cluster to perform clustering on statistical output # -# Examples -# -------- +# Examples +# -------- +# +# >>> cl = Cluster() +# >>> cl.inputs.threshold = 2.3 +# >>> cl.inputs.in_file = 'zstat1.nii.gz' +# >>> cl.inputs.out_localmax_txt_file = 'stats.txt' +# >>> cl.inputs.use_mm = True +# >>> cl.cmdline +# 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' +# # -# >>> cl = Cluster() -# >>> cl.inputs.threshold = 2.3 -# >>> cl.inputs.in_file = 'zstat1.nii.gz' -# >>> cl.inputs.out_localmax_txt_file = 'stats.txt' -# >>> cl.inputs.use_mm = True -# >>> cl.cmdline -# 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' task_name: Cluster nipype_name: Cluster nipype_module: nipype.interfaces.fsl.model @@ -26,19 +28,19 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
cope_file: generic/file # type=file|default=: cope volume - in_file: medimage/nifti-gz + in_file: generic/file # type=file|default=: input volume std_space_file: generic/file # type=file|default=: filename for standard-space volume warpfield_file: generic/file - # type=file|default=: file contining warpfield + # type=file|default=: file containing warpfield xfm_file: generic/file # type=file|default=: filename for Linear: input->standard-space transform. Non-linear: input->highres transform callable_defaults: @@ -52,11 +54,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
index_file: generic/file # type=file: output of cluster index (in size order) localmax_txt_file: generic/file @@ -77,127 +79,119 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: input volume - threshold: - # type=float|default=0.0: threshold for input volume - out_index_file: - # type=traitcompound|default=None: output of cluster index (in size order) - out_threshold_file: - # type=traitcompound|default=None: thresholded image - out_localmax_txt_file: - # type=traitcompound|default=None: local maxima text file - out_localmax_vol_file: - # type=traitcompound|default=None: output of local maxima volume - out_size_file: - # type=traitcompound|default=None: filename for output of size image - out_max_file: - # type=traitcompound|default=None: filename for output of max image - out_mean_file: - # type=traitcompound|default=None: filename for output of mean image - out_pval_file: - # type=traitcompound|default=None: filename for image output of log pvals - pthreshold: - # type=float|default=0.0: p-threshold for clusters - peak_distance: - # type=float|default=0.0: minimum distance between local maxima/minima, in mm (default 0) - cope_file: - # type=file|default=: cope volume - volume: - # type=int|default=0: number of voxels in the mask - dlh: - # type=float|default=0.0: smoothness estimate = sqrt(det(Lambda)) - fractional: - # type=bool|default=False: interprets the threshold as a 
fraction of the robust range - connectivity: - # type=int|default=0: the connectivity of voxels (default 26) - use_mm: - # type=bool|default=False: use mm, not voxel, coordinates - find_min: - # type=bool|default=False: find minima instead of maxima - no_table: - # type=bool|default=False: suppresses printing of the table info - minclustersize: - # type=bool|default=False: prints out minimum significant cluster size - xfm_file: - # type=file|default=: filename for Linear: input->standard-space transform. Non-linear: input->highres transform - std_space_file: - # type=file|default=: filename for standard-space volume - num_maxima: - # type=int|default=0: no of local maxima to report - warpfield_file: - # type=file|default=: file containing warpfield - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - threshold: "2.3" - # type=float|default=0.0: threshold for input volume - in_file: - # type=file|default=: input volume - out_localmax_txt_file: '"stats.txt"' - # type=traitcompound|default=None: local maxima text file - use_mm: "True" - # type=bool|default=False: use mm, not voxel, coordinates - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: input volume + threshold: + # type=float|default=0.0: threshold for input volume + out_index_file: + # type=traitcompound|default=None: output of cluster index (in size order) + out_threshold_file: + # type=traitcompound|default=None: thresholded image + out_localmax_txt_file: + # type=traitcompound|default=None: local maxima text file + out_localmax_vol_file: + # type=traitcompound|default=None: output of local maxima volume + out_size_file: + # type=traitcompound|default=None: filename for output of size image + out_max_file: + # type=traitcompound|default=None: filename for output of max image + out_mean_file: + # type=traitcompound|default=None: filename for output of mean image + out_pval_file: + # type=traitcompound|default=None: filename for image output of log pvals + pthreshold: + # type=float|default=0.0: p-threshold for clusters + peak_distance: + # type=float|default=0.0: minimum distance between local maxima/minima, in mm (default 0) + cope_file: + # type=file|default=: cope volume + volume: + # type=int|default=0: number of voxels in the mask + dlh: + # type=float|default=0.0: smoothness estimate = sqrt(det(Lambda)) + fractional: + # type=bool|default=False: interprets the threshold as a fraction of the robust range + connectivity: + # type=int|default=0: the connectivity of voxels (default 26) + use_mm: + # type=bool|default=False: use mm, not voxel, coordinates + find_min: + # type=bool|default=False: find minima instead of maxima + no_table: + # type=bool|default=False: suppresses printing of the table info + minclustersize: + # type=bool|default=False: prints out minimum significant cluster size + xfm_file: + # type=file|default=: filename for Linear: input->standard-space 
transform. Non-linear: input->highres transform + std_space_file: + # type=file|default=: filename for standard-space volume + num_maxima: + # type=int|default=0: no of local maxima to report + warpfield_file: + # type=file|default=: file containing warpfield + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + threshold: '2.3' + # type=float|default=0.0: threshold for input volume + out_localmax_txt_file: '"stats.txt"' + # type=traitcompound|default=None: local maxima text file + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: - - cmdline: cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. 
- threshold: "2.3" - # type=float|default=0.0: threshold for input volume - in_file: '"zstat1.nii.gz"' - # type=file|default=: input volume - out_localmax_txt_file: '"stats.txt"' - # type=traitcompound|default=None: local maxima text file - use_mm: "True" - # type=bool|default=False: use mm, not voxel, coordinates - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. + threshold: '2.3' + # type=float|default=0.0: threshold for input volume + out_localmax_txt_file: '"stats.txt"' + # type=traitcompound|default=None: local maxima text file + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/cluster_callables.py b/nipype-auto-conv/specs/interfaces/cluster_callables.py deleted file mode 100644 index c6a7247..0000000 --- a/nipype-auto-conv/specs/interfaces/cluster_callables.py +++ /dev/null @@ -1,379 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Cluster.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def index_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["index_file"] - - -def localmax_txt_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["localmax_txt_file"] - - -def localmax_vol_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["localmax_vol_file"] - - -def max_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["max_file"] - - -def mean_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_file"] - - -def pval_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["pval_file"] - - -def size_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["size_file"] - - -def threshold_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["threshold_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "cluster" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L2074 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - for key, suffix in list(filemap.items()): - outkey = key[4:] - inval = getattr(inputs, key) - if inval is not attrs.NOTHING: - if isinstance(inval, bool): - if inval: - change_ext = True - if suffix.endswith(".txt"): - change_ext = False - outputs[outkey] = _gen_fname( - inputs.in_file, - suffix="_" + suffix, - change_ext=change_ext, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs[outkey] = os.path.abspath(inval) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/complex.yaml b/nipype-auto-conv/specs/interfaces/complex.yaml index b612ec1..7cc33d8 100644 --- a/nipype-auto-conv/specs/interfaces/complex.yaml +++ b/nipype-auto-conv/specs/interfaces/complex.yaml @@ -7,13 +7,13 @@ # ---- # fslcomplex is a tool for converting complex data # -# Examples -# -------- +# Examples +# -------- # -# >>> cplx = Complex() -# >>> cplx.inputs.complex_in_file = "complex.nii" -# >>> cplx.real_polar = True -# >>> res = cplx.run() # doctest: +SKIP +# >>> cplx = Complex() +# >>> cplx.inputs.complex_in_file = "complex.nii" +# >>> cplx.real_polar = True +# >>> res = cplx.run() # doctest: +SKIP # # # @@ -35,29 +35,14 @@ inputs: # type=file|default=: complex_in_file2: generic/file # type=file|default=: - complex_out_file: Path - # type=file: - # type=file|default=: imaginary_in_file: generic/file # type=file|default=: - imaginary_out_file: Path - # type=file: - # type=file|default=: magnitude_in_file: generic/file # type=file|default=: - magnitude_out_file: Path - # type=file: - # type=file|default=: phase_in_file: generic/file # type=file|default=: 
- phase_out_file: Path - # type=file: - # type=file|default=: real_in_file: generic/file # type=file|default=: - real_out_file: Path - # type=file: - # type=file|default=: callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -93,7 +78,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields complex_out_file: complex_out_file # type=file: # type=file|default=: @@ -159,13 +144,13 @@ tests: complex_merge: # type=bool|default=False: output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/complex_callables.py b/nipype-auto-conv/specs/interfaces/complex_callables.py deleted file mode 100644 index b8a505b..0000000 --- a/nipype-auto-conv/specs/interfaces/complex_callables.py +++ /dev/null @@ -1,464 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Complex.yaml""" - -import attrs -import logging -import os 
-import os.path as op -from glob import glob -from pathlib import Path - - -def complex_out_file_default(inputs): - return _gen_filename("complex_out_file", inputs=inputs) - - -def imaginary_out_file_default(inputs): - return _gen_filename("imaginary_out_file", inputs=inputs) - - -def magnitude_out_file_default(inputs): - return _gen_filename("magnitude_out_file", inputs=inputs) - - -def phase_out_file_default(inputs): - return _gen_filename("phase_out_file", inputs=inputs) - - -def real_out_file_default(inputs): - return _gen_filename("real_out_file", inputs=inputs) - - -def complex_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["complex_out_file"] - - -def imaginary_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["imaginary_out_file"] - - -def magnitude_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["magnitude_out_file"] - - -def phase_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["phase_out_file"] - - -def real_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["real_out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L2031 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "complex_out_file": - if inputs.complex_cartesian: - in_file = inputs.real_in_file - elif inputs.complex_polar: - in_file = inputs.magnitude_in_file - elif 
inputs.complex_split or inputs.complex_merge: - in_file = inputs.complex_in_file - else: - return None - return _gen_fname( - in_file, - suffix="_cplx", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif name == "magnitude_out_file": - return _gen_fname( - inputs.complex_in_file, - suffix="_mag", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif name == "phase_out_file": - return _gen_fname( - inputs.complex_in_file, - suffix="_phase", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif name == "real_out_file": - return _gen_fname( - inputs.complex_in_file, - suffix="_real", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif name == "imaginary_out_file": - return _gen_fname( - inputs.complex_in_file, - suffix="_imag", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslcomplex" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L2052 of /interfaces/fsl/utils.py -def _get_output(name, inputs=None, stdout=None, stderr=None, output_dir=None): - output = getattr(inputs, name) - if output is attrs.NOTHING: - output = _gen_filename( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - return os.path.abspath(output) - - -# Original source at L2058 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if ( - inputs.complex_cartesian - or inputs.complex_polar - or inputs.complex_split - or inputs.complex_merge - ): - outputs["complex_out_file"] = _get_output( - "complex_out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif inputs.real_cartesian: - outputs["real_out_file"] = _get_output( - "real_out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["imaginary_out_file"] = _get_output( - "imaginary_out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif inputs.real_polar: - outputs["magnitude_out_file"] = _get_output( - "magnitude_out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["phase_out_file"] = _get_output( - "phase_out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A 
filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/contrast_mgr.yaml b/nipype-auto-conv/specs/interfaces/contrast_mgr.yaml index 4d5e94d..7fe713d 100644 --- a/nipype-auto-conv/specs/interfaces/contrast_mgr.yaml +++ b/nipype-auto-conv/specs/interfaces/contrast_mgr.yaml @@ -7,9 +7,9 @@ # ---- # Use FSL contrast_mgr command to evaluate contrasts # -# In interface mode this file assumes that all the required inputs are in the -# same location. This has deprecated for FSL versions 5.0.7+ as the necessary -# corrections file is no longer generated by FILMGLS. +# In interface mode this file assumes that all the required inputs are in the +# same location. This has deprecated for FSL versions 5.0.7+ as the necessary +# corrections file is no longer generated by FILMGLS. 
# task_name: ContrastMgr nipype_name: ContrastMgr @@ -71,7 +71,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -95,13 +95,13 @@ tests: suffix: # type=str|default='': suffix to put on the end of the cope filename before the contrast number, default is nothing output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/contrast_mgr_callables.py b/nipype-auto-conv/specs/interfaces/contrast_mgr_callables.py deleted file mode 100644 index a2849d0..0000000 --- a/nipype-auto-conv/specs/interfaces/contrast_mgr_callables.py +++ /dev/null @@ -1,468 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ContrastMgr.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def copes_callable(output_dir, inputs, stdout, 
stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["copes"] - - -def fstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fstats"] - - -def neffs_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["neffs"] - - -def tstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tstats"] - - -def varcopes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["varcopes"] - - -def zfstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["zfstats"] - - -def zstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["zstats"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. 
- cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "contrast_mgr" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1301 of /interfaces/fsl/model.py -def _get_numcons(inputs=None, stdout=None, stderr=None, output_dir=None): - numtcons = 0 - numfcons = 0 - if inputs.tcon_file is not attrs.NOTHING: - fp = open(inputs.tcon_file, "rt") - for line in fp.readlines(): - if line.startswith("/NumContrasts"): - numtcons = int(line.split()[-1]) - break - fp.close() - if inputs.fcon_file is not attrs.NOTHING: - fp = open(inputs.fcon_file, "rt") - for line in fp.readlines(): - if line.startswith("/NumContrasts"): - numfcons = int(line.split()[-1]) - break - fp.close() - return numtcons, numfcons - - -# Original source at L1320 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - pth, _ = os.path.split(inputs.sigmasquareds) - numtcons, numfcons = _get_numcons( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - base_contrast = 1 - if inputs.contrast_num is not attrs.NOTHING: - base_contrast = inputs.contrast_num - copes = [] - varcopes = [] - zstats = [] - tstats = [] - neffs = [] - for i in range(numtcons): - copes.append( - _gen_fname( - "cope%d.nii" % (base_contrast + i), - cwd=pth, - 
inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - varcopes.append( - _gen_fname( - "varcope%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - zstats.append( - _gen_fname( - "zstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - tstats.append( - _gen_fname( - "tstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - neffs.append( - _gen_fname( - "neff%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if copes: - outputs["copes"] = copes - outputs["varcopes"] = varcopes - outputs["zstats"] = zstats - outputs["tstats"] = tstats - outputs["neffs"] = neffs - fstats = [] - zfstats = [] - for i in range(numfcons): - fstats.append( - _gen_fname( - "fstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - zfstats.append( - _gen_fname( - "zfstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if fstats: - outputs["fstats"] = fstats - outputs["zfstats"] = zfstats - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/convert_warp.yaml b/nipype-auto-conv/specs/interfaces/convert_warp.yaml index 9a3fcfe..6f5b37e 100644 --- a/nipype-auto-conv/specs/interfaces/convert_warp.yaml +++ b/nipype-auto-conv/specs/interfaces/convert_warp.yaml @@ -6,21 +6,21 @@ # Docs # ---- # Use FSL `convertwarp `_ -# for combining multiple transforms into one. +# for combining multiple transforms into one. 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import ConvertWarp -# >>> warputils = ConvertWarp() -# >>> warputils.inputs.warp1 = "warpfield.nii" -# >>> warputils.inputs.reference = "T1.nii" -# >>> warputils.inputs.relwarp = True -# >>> warputils.inputs.output_type = "NIFTI_GZ" -# >>> warputils.cmdline # doctest: +ELLIPSIS -# 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' -# >>> res = warputils.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import ConvertWarp +# >>> warputils = ConvertWarp() +# >>> warputils.inputs.warp1 = "warpfield.nii" +# >>> warputils.inputs.reference = "T1.nii" +# >>> warputils.inputs.relwarp = True +# >>> warputils.inputs.output_type = "NIFTI_GZ" +# >>> warputils.cmdline # doctest: +ELLIPSIS +# 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' +# >>> res = warputils.run() # doctest: +SKIP # # # @@ -40,14 +40,11 @@ inputs: # passed to the field in the automatically generated unittests. midmat: generic/file # type=file|default=: Name of file containing mid-warp-affine transform - out_file: Path - # type=file: Name of output file, containing the warp as field or coefficients. - # type=file|default=: Name of output file, containing warps that are the combination of all those given as arguments. The format of this will be a field-file (rather than spline coefficients) with any affine components included. postmat: generic/file # type=file|default=: Name of file containing an affine transform (applied last). It could e.g. be an affine transform that maps the MNI152-space into a better approximation to the Talairach-space (if indeed there is one). premat: generic/file # type=file|default=: filename for pre-transform (affine matrix) - reference: medimage/nifti1 + reference: generic/file # type=file|default=: Name of a file in target space of the full transform. 
shift_in_file: generic/file # type=file|default=: Name of file containing a "shiftmap", a non-linear transform with displacements only in one direction (applied first, before premat). This would typically be a fieldmap that has been pre-processed using fugue that maps a subjects functional (EPI) data onto an undistorted space (i.e. a space that corresponds to his/her true anatomy). @@ -78,7 +75,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -119,13 +116,13 @@ tests: out_relwarp: # type=bool|default=False: If set it indicates that the warps in --out should be relative, i.e. the values in --out are displacements from the coordinates in --ref. 
output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -144,14 +141,10 @@ tests: # (if not specified, will try to choose a sensible value) warp1: # type=file|default=: Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a fnirt-transform from a subjects structural scan to an average of a group of subjects. - reference: - # type=file|default=: Name of a file in target space of the full transform. relwarp: 'True' # type=bool|default=False: If set it indicates that the warps in --warp1/2 should be interpreted as relative. I.e. the values in --warp1/2 are displacements from the coordinates in the next space. 
- output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -174,14 +167,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. warp1: '"warpfield.nii"' # type=file|default=: Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a fnirt-transform from a subjects structural scan to an average of a group of subjects. - reference: '"T1.nii"' - # type=file|default=: Name of a file in target space of the full transform. relwarp: 'True' # type=bool|default=False: If set it indicates that the warps in --warp1/2 should be interpreted as relative. I.e. the values in --warp1/2 are displacements from the coordinates in the next space. - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/convert_warp_callables.py b/nipype-auto-conv/specs/interfaces/convert_warp_callables.py deleted file mode 100644 index bb15f5b..0000000 --- a/nipype-auto-conv/specs/interfaces/convert_warp_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ConvertWarp.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should 
be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py 
-def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/convert_xfm.yaml b/nipype-auto-conv/specs/interfaces/convert_xfm.yaml index 1348dc2..dedcb8b 100644 --- a/nipype-auto-conv/specs/interfaces/convert_xfm.yaml +++ b/nipype-auto-conv/specs/interfaces/convert_xfm.yaml @@ -7,16 +7,16 @@ # ---- # Use the FSL utility convert_xfm to modify FLIRT transformation matrices. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> invt = fsl.ConvertXFM() -# >>> invt.inputs.in_file = "flirt.mat" -# >>> invt.inputs.invert_xfm = True -# >>> invt.inputs.out_file = 'flirt_inv.mat' -# >>> invt.cmdline -# 'convert_xfm -omat flirt_inv.mat -inverse flirt.mat' +# >>> import nipype.interfaces.fsl as fsl +# >>> invt = fsl.ConvertXFM() +# >>> invt.inputs.in_file = "flirt.mat" +# >>> invt.inputs.invert_xfm = True +# >>> invt.inputs.out_file = 'flirt_inv.mat' +# >>> invt.cmdline +# 'convert_xfm -omat flirt_inv.mat -inverse flirt.mat' # # # @@ -38,9 +38,6 @@ inputs: # type=file|default=: input transformation matrix in_file2: generic/file # type=file|default=: second input matrix (for use with fix_scale_skew or concat_xfm) - out_file: Path - # type=file: output transformation matrix - # type=file|default=: final transformation matrix callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -64,7 +61,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"flirt_inv.mat"' # type=file: output transformation matrix # type=file|default=: final transformation matrix @@ -88,13 +85,13 @@ tests: # type=file: output transformation matrix # type=file|default=: final transformation matrix output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables 
imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -113,13 +110,11 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: input transformation matrix - invert_xfm: 'True' - # type=bool|default=False: invert input transformation out_file: '"flirt_inv.mat"' # type=file: output transformation matrix # type=file|default=: final transformation matrix imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.interfaces.fsl as fsl expected_outputs: @@ -143,13 +138,11 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"flirt.mat"' # type=file|default=: input transformation matrix - invert_xfm: 'True' - # type=bool|default=False: invert input transformation out_file: '"flirt_inv.mat"' # type=file: output transformation matrix # type=file|default=: final transformation matrix imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/convert_xfm_callables.py b/nipype-auto-conv/specs/interfaces/convert_xfm_callables.py deleted file mode 100644 index 374e76f..0000000 --- a/nipype-auto-conv/specs/interfaces/convert_xfm_callables.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ConvertXFM.yaml""" - -import attrs -import os -import os.path as op -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -# Original source at L1592 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L1567 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outfile = inputs.out_file - if outfile is attrs.NOTHING: - _, infile1, _ = split_filename(inputs.in_file) - if inputs.invert_xfm: - outfile = fname_presuffix( - infile1, suffix="_inv.mat", newpath=output_dir, use_ext=False - ) - else: - if inputs.concat_xfm: - _, infile2, _ = split_filename(inputs.in_file2) - outfile = fname_presuffix( - "%s_%s" % (infile1, infile2), - suffix=".mat", - newpath=output_dir, - use_ext=False, - ) - else: - outfile = fname_presuffix( - infile1, suffix="_fix.mat", newpath=output_dir, use_ext=False - ) - outputs["out_file"] = os.path.abspath(outfile) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - 
---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext diff --git a/nipype-auto-conv/specs/interfaces/copy_geom.yaml b/nipype-auto-conv/specs/interfaces/copy_geom.yaml index 296945d..fe75795 100644 --- a/nipype-auto-conv/specs/interfaces/copy_geom.yaml +++ b/nipype-auto-conv/specs/interfaces/copy_geom.yaml @@ -6,12 +6,12 @@ # Docs # ---- # Use fslcpgeom to copy the header geometry information to another image. -# Copy certain parts of the header information (image dimensions, voxel -# dimensions, voxel dimensions units string, image orientation/origin or -# qform/sform info) from one image to another. Note that only copies from -# Analyze to Analyze or Nifti to Nifti will work properly. Copying from -# different files will result in loss of information or potentially incorrect -# settings. +# Copy certain parts of the header information (image dimensions, voxel +# dimensions, voxel dimensions units string, image orientation/origin or +# qform/sform info) from one image to another. Note that only copies from +# Analyze to Analyze or Nifti to Nifti will work properly. Copying from +# different files will result in loss of information or potentially incorrect +# settings. 
# task_name: CopyGeom nipype_name: CopyGeom @@ -53,7 +53,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -67,13 +67,13 @@ tests: ignore_dims: # type=bool|default=False: Do not copy image dimensions output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/copy_geom_callables.py b/nipype-auto-conv/specs/interfaces/copy_geom_callables.py deleted file mode 100644 index f1ae1c9..0000000 --- a/nipype-auto-conv/specs/interfaces/copy_geom_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of CopyGeom.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, 
stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if 
base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/dilate_image.yaml b/nipype-auto-conv/specs/interfaces/dilate_image.yaml index e4b6cc6..13bc962 100644 --- a/nipype-auto-conv/specs/interfaces/dilate_image.yaml +++ b/nipype-auto-conv/specs/interfaces/dilate_image.yaml @@ -24,9 +24,6 @@ inputs: # type=file|default=: image to operate on kernel_file: generic/file # type=file|default=: use external file for kernel - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -50,7 +47,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -80,13 +77,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + 
# type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/dilate_image_callables.py b/nipype-auto-conv/specs/interfaces/dilate_image_callables.py deleted file mode 100644 index db0f502..0000000 --- a/nipype-auto-conv/specs/interfaces/dilate_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of DilateImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a 
filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True 
(default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: 
- clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/distance_map.yaml b/nipype-auto-conv/specs/interfaces/distance_map.yaml index 872a507..cfc573e 100644 --- a/nipype-auto-conv/specs/interfaces/distance_map.yaml +++ b/nipype-auto-conv/specs/interfaces/distance_map.yaml @@ -6,15 +6,15 @@ # Docs # ---- # Use FSL's distancemap to generate a map of the distance to the nearest -# nonzero voxel. +# nonzero voxel. # -# Example -# ------- +# Example +# ------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> mapper = fsl.DistanceMap() -# >>> mapper.inputs.in_file = "skeleton_mask.nii.gz" -# >>> mapper.run() # doctest: +SKIP +# >>> import nipype.interfaces.fsl as fsl +# >>> mapper = fsl.DistanceMap() +# >>> mapper.inputs.in_file = "skeleton_mask.nii.gz" +# >>> mapper.run() # doctest: +SKIP # # task_name: DistanceMap @@ -31,9 +31,6 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- distance_map: Path - # type=file: value is distance to nearest nonzero voxels - # type=file|default=: distance map to write in_file: generic/file # type=file|default=: image to calculate distance values for mask_file: generic/file @@ -64,7 +61,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields distance_map: distance_map # type=file: value is distance to nearest nonzero voxels # type=file|default=: distance map to write @@ -87,13 +84,13 @@ tests: # type=file: value is distance to nearest nonzero voxels # type=file|default=: distance map to write output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/distance_map_callables.py b/nipype-auto-conv/specs/interfaces/distance_map_callables.py deleted file mode 100644 index f12fbb6..0000000 --- a/nipype-auto-conv/specs/interfaces/distance_map_callables.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of DistanceMap.yaml""" - -import 
attrs -import os -import os.path as op -from pathlib import Path - - -def distance_map_default(inputs): - return _gen_filename("distance_map", inputs=inputs) - - -def distance_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["distance_map"] - - -def local_max_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["local_max_file"] - - -# Original source at L1537 of /interfaces/fsl/dti.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "distance_map": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["distance_map"] - return None - - -# Original source at L1519 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - _si = inputs - outputs["distance_map"] = _si.distance_map - if _si.distance_map is attrs.NOTHING: - outputs["distance_map"] = fname_presuffix( - _si.in_file, suffix="_dstmap", use_ext=True, newpath=output_dir - ) - outputs["distance_map"] = os.path.abspath(outputs["distance_map"]) - if _si.local_max_file is not attrs.NOTHING: - outputs["local_max_file"] = _si.local_max_file - if isinstance(_si.local_max_file, bool): - outputs["local_max_file"] = fname_presuffix( - _si.in_file, suffix="_lclmax", use_ext=True, newpath=output_dir - ) - outputs["local_max_file"] = os.path.abspath(outputs["local_max_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to 
append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext diff --git a/nipype-auto-conv/specs/interfaces/dti_fit.yaml b/nipype-auto-conv/specs/interfaces/dti_fit.yaml index 6a42635..62e05b2 100644 
--- a/nipype-auto-conv/specs/interfaces/dti_fit.yaml +++ b/nipype-auto-conv/specs/interfaces/dti_fit.yaml @@ -6,20 +6,20 @@ # Docs # ---- # Use FSL dtifit command for fitting a diffusion tensor model at each -# voxel +# voxel # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces import fsl -# >>> dti = fsl.DTIFit() -# >>> dti.inputs.dwi = 'diffusion.nii' -# >>> dti.inputs.bvecs = 'bvecs' -# >>> dti.inputs.bvals = 'bvals' -# >>> dti.inputs.base_name = 'TP' -# >>> dti.inputs.mask = 'mask.nii' -# >>> dti.cmdline -# 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' +# >>> from nipype.interfaces import fsl +# >>> dti = fsl.DTIFit() +# >>> dti.inputs.dwi = 'diffusion.nii' +# >>> dti.inputs.bvecs = 'bvecs' +# >>> dti.inputs.bvals = 'bvals' +# >>> dti.inputs.base_name = 'TP' +# >>> dti.inputs.mask = 'mask.nii' +# >>> dti.cmdline +# 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' # # task_name: DTIFit @@ -38,7 +38,7 @@ inputs: # passed to the field in the automatically generated unittests. 
bvals: medimage/bval # type=file|default=: b values file - bvecs: medimage/bvec + bvecs: generic/file # type=file|default=: b vectors file cni: generic/file # type=file|default=: input counfound regressors @@ -93,7 +93,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -134,13 +134,13 @@ tests: gradnonlin: # type=file|default=: gradient non linearities output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -159,16 +159,12 @@ tests: # (if not specified, will try to choose a sensible value) dwi: # type=file|default=: diffusion weighted image data file - bvecs: - # type=file|default=: b vectors file bvals: # type=file|default=: b values file - base_name: '"TP"' - # type=str|default='dtifit_': base_name that all output files will start with mask: # type=file|default=: bet binary mask file imports: - # list[nipype2pydra.task.base.importstatement] - list import 
statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -191,16 +187,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. dwi: '"diffusion.nii"' # type=file|default=: diffusion weighted image data file - bvecs: '"bvecs"' - # type=file|default=: b vectors file bvals: '"bvals"' # type=file|default=: b values file - base_name: '"TP"' - # type=str|default='dtifit_': base_name that all output files will start with mask: '"mask.nii"' # type=file|default=: bet binary mask file imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/dti_fit_callables.py b/nipype-auto-conv/specs/interfaces/dti_fit_callables.py deleted file mode 100644 index 7037322..0000000 --- a/nipype-auto-conv/specs/interfaces/dti_fit_callables.py +++ /dev/null @@ -1,407 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of DTIFit.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def FA_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["FA"] - - -def L1_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["L1"] - - -def L2_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["L2"] - - -def L3_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["L3"] - - -def MD_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["MD"] - - -def MO_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["MO"] - - -def S0_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["S0"] - - -def V1_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["V1"] - - -def V2_callable(output_dir, inputs, stdout, 
stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["V2"] - - -def V3_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["V3"] - - -def sse_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["sse"] - - -def tensor_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tensor"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "dtifit" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L114 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - keys_to_ignore = {"outputtype", "environ", "args"} - # Optional output: Map output name to input flag - opt_output = {"tensor": inputs.save_tensor, "sse": inputs.sse} - # Ignore optional output, whose corresponding input-flag is not defined - # or set to False - for output, input_flag in opt_output.items(): - if (input_flag is not attrs.NOTHING) and input_flag: - # this is wanted output, do not ignore - continue - keys_to_ignore.add(output) - - outputs = {} - for k in set(outputs.keys()) - keys_to_ignore: - outputs[k] = _gen_fname( - inputs.base_name, - suffix="_" + k, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/dual_regression.yaml b/nipype-auto-conv/specs/interfaces/dual_regression.yaml index 5d90159..1a5ba55 100644 --- a/nipype-auto-conv/specs/interfaces/dual_regression.yaml +++ b/nipype-auto-conv/specs/interfaces/dual_regression.yaml @@ -7,19 +7,19 @@ # ---- # Wrapper Script for Dual Regression Workflow # -# Examples -# -------- +# Examples +# -------- # -# >>> dual_regression = DualRegression() -# >>> dual_regression.inputs.in_files = ["functional.nii", "functional2.nii", "functional3.nii"] -# >>> dual_regression.inputs.group_IC_maps_4D = "allFA.nii" -# >>> dual_regression.inputs.des_norm = False -# >>> dual_regression.inputs.one_sample_group_mean = True -# >>> dual_regression.inputs.n_perm = 10 -# >>> dual_regression.inputs.out_dir = "my_output_directory" -# >>> dual_regression.cmdline -# 'dual_regression allFA.nii 0 -1 10 my_output_directory functional.nii functional2.nii functional3.nii' -# >>> dual_regression.run() # doctest: +SKIP +# >>> dual_regression = DualRegression() +# >>> dual_regression.inputs.in_files = ["functional.nii", "functional2.nii", 
"functional3.nii"] +# >>> dual_regression.inputs.group_IC_maps_4D = "allFA.nii" +# >>> dual_regression.inputs.des_norm = False +# >>> dual_regression.inputs.one_sample_group_mean = True +# >>> dual_regression.inputs.n_perm = 10 +# >>> dual_regression.inputs.out_dir = "my_output_directory" +# >>> dual_regression.cmdline +# 'dual_regression allFA.nii 0 -1 10 my_output_directory functional.nii functional2.nii functional3.nii' +# >>> dual_regression.run() # doctest: +SKIP # # task_name: DualRegression @@ -40,13 +40,10 @@ inputs: # type=file|default=: Design contrasts for final cross-subject modelling with randomise design_file: generic/file # type=file|default=: Design matrix for final cross-subject modelling with randomise - group_IC_maps_4D: medimage/nifti1 + group_IC_maps_4D: generic/file # type=file|default=: 4D image containing spatial IC maps (melodic_IC) from the whole-group ICA analysis in_files: medimage/nifti1+list-of # type=inputmultiobject|default=[]: List all subjects' preprocessed, standard-space 4D datasets - out_dir: Path - # type=directory: - # type=directory|default='output': This directory will be created to hold all output and logfiles callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -70,8 +67,8 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields - out_dir: '"my_output_directory"' + # dict[str, str] - `path_template` values to be provided to output fields + out_dir: out_dir # type=directory: # type=directory|default='output': This directory will be created to hold all output and logfiles requirements: @@ -98,13 +95,13 @@ tests: # type=directory: # type=directory|default='output': This directory will be created to hold all output and 
logfiles output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -123,19 +120,12 @@ tests: # (if not specified, will try to choose a sensible value) in_files: # type=inputmultiobject|default=[]: List all subjects' preprocessed, standard-space 4D datasets - group_IC_maps_4D: - # type=file|default=: 4D image containing spatial IC maps (melodic_IC) from the whole-group ICA analysis des_norm: 'False' # type=bool|default=True: Whether to variance-normalise the timecourses used as the stage-2 regressors; True is default and recommended - one_sample_group_mean: 'True' - # type=bool|default=False: perform 1-sample group-mean test instead of generic permutation test n_perm: '10' # type=int|default=0: Number of permutations for randomise; set to 1 for just raw tstat output, set to 0 to not run randomise at all. 
- out_dir: '"my_output_directory"' - # type=directory: - # type=directory|default='output': This directory will be created to hold all output and logfiles imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -158,19 +148,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_files: '["functional.nii", "functional2.nii", "functional3.nii"]' # type=inputmultiobject|default=[]: List all subjects' preprocessed, standard-space 4D datasets - group_IC_maps_4D: '"allFA.nii"' - # type=file|default=: 4D image containing spatial IC maps (melodic_IC) from the whole-group ICA analysis des_norm: 'False' # type=bool|default=True: Whether to variance-normalise the timecourses used as the stage-2 regressors; True is default and recommended - one_sample_group_mean: 'True' - # type=bool|default=False: perform 1-sample group-mean test instead of generic permutation test n_perm: '10' # type=int|default=0: Number of permutations for randomise; set to 1 for just raw tstat output, set to 0 to not run randomise at all. - out_dir: '"my_output_directory"' - # type=directory: - # type=directory|default='output': This directory will be created to hold all output and logfiles imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/dual_regression_callables.py b/nipype-auto-conv/specs/interfaces/dual_regression_callables.py deleted file mode 100644 index 55ed558..0000000 --- a/nipype-auto-conv/specs/interfaces/dual_regression_callables.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of DualRegression.yaml""" - -import attrs -import os - - -def out_dir_default(inputs): - return _gen_filename("out_dir", inputs=inputs) - - -def out_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_dir"] - - -# Original source at L2198 of /interfaces/fsl/model.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_dir": - return output_dir - - -# Original source at L2190 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.out_dir is not attrs.NOTHING: - outputs["out_dir"] = os.path.abspath(inputs.out_dir) - else: - outputs["out_dir"] = _gen_filename( - "out_dir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/eddy.yaml b/nipype-auto-conv/specs/interfaces/eddy.yaml index cf7bf91..272b00e 100644 --- a/nipype-auto-conv/specs/interfaces/eddy.yaml +++ b/nipype-auto-conv/specs/interfaces/eddy.yaml @@ -6,42 +6,42 @@ # Docs # ---- # -# Interface for FSL eddy, a tool for estimating and correcting eddy -# currents induced distortions. `User guide -# `__ and -# `more info regarding acqp file -# `_. +# Interface for FSL eddy, a tool for estimating and correcting eddy +# currents induced distortions. `User guide +# `__ and +# `more info regarding acqp file +# `_. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import Eddy +# >>> from nipype.interfaces.fsl import Eddy # -# Running eddy on a CPU using OpenMP: -# >>> eddy = Eddy() -# >>> eddy.inputs.in_file = 'epi.nii' -# >>> eddy.inputs.in_mask = 'epi_mask.nii' -# >>> eddy.inputs.in_index = 'epi_index.txt' -# >>> eddy.inputs.in_acqp = 'epi_acqp.txt' -# >>> eddy.inputs.in_bvec = 'bvecs.scheme' -# >>> eddy.inputs.in_bval = 'bvals.scheme' -# >>> eddy.cmdline # doctest: +ELLIPSIS -# 'eddy_openmp --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none' +# Running eddy on a CPU using OpenMP: +# >>> eddy = Eddy() +# >>> eddy.inputs.in_file = 'epi.nii' +# >>> eddy.inputs.in_mask = 'epi_mask.nii' +# >>> eddy.inputs.in_index = 'epi_index.txt' +# >>> eddy.inputs.in_acqp = 'epi_acqp.txt' +# >>> eddy.inputs.in_bvec = 'bvecs.scheme' +# >>> eddy.inputs.in_bval = 'bvals.scheme' +# >>> eddy.cmdline # doctest: +ELLIPSIS +# 'eddy_openmp --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none' # -# Running eddy on an Nvidia GPU using cuda: -# >>> eddy.inputs.use_cuda = True -# >>> eddy.cmdline # doctest: +ELLIPSIS -# 'eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none' +# Running eddy on an Nvidia GPU using cuda: +# >>> eddy.inputs.use_cuda = True +# >>> eddy.cmdline # doctest: +ELLIPSIS +# 'eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii 
--index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none' # -# Running eddy with slice-to-volume motion correction: -# >>> eddy.inputs.mporder = 6 -# >>> eddy.inputs.slice2vol_niter = 5 -# >>> eddy.inputs.slice2vol_lambda = 1 -# >>> eddy.inputs.slice2vol_interp = 'trilinear' -# >>> eddy.inputs.slice_order = 'epi_slspec.txt' -# >>> eddy.cmdline # doctest: +ELLIPSIS -# 'eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --mporder=6 --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --s2v_interp=trilinear --s2v_lambda=1 --s2v_niter=5 --slspec=epi_slspec.txt --slm=none' -# >>> res = eddy.run() # doctest: +SKIP +# Running eddy with slice-to-volume motion correction: +# >>> eddy.inputs.mporder = 6 +# >>> eddy.inputs.slice2vol_niter = 5 +# >>> eddy.inputs.slice2vol_lambda = 1 +# >>> eddy.inputs.slice2vol_interp = 'trilinear' +# >>> eddy.inputs.slice_order = 'epi_slspec.txt' +# >>> eddy.cmdline # doctest: +ELLIPSIS +# 'eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --mporder=6 --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --s2v_interp=trilinear --s2v_lambda=1 --s2v_niter=5 --slspec=epi_slspec.txt --slm=none' +# >>> res = eddy.run() # doctest: +SKIP # # task_name: Eddy @@ -53,35 +53,35 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + field: generic/file + # type=file|default=: Non-topup derived fieldmap scaled in Hz + field_mat: generic/file + # type=file|default=: Matrix specifying the relative positions of the fieldmap, --field, and the first volume of the input file, --imain + in_acqp: generic/file + # type=file|default=: File containing acquisition parameters + in_bval: generic/file + # type=file|default=: File containing the b-values for all volumes in --imain + in_bvec: generic/file + # type=file|default=: File containing the b-vectors for all volumes in --imain in_file: medimage/nifti1 # type=file|default=: File containing all the images to estimate distortions for - in_mask: generic/file - # type=file|default=: Mask to indicate brain in_index: text/text-file # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup - in_acqp: generic/file - # type=file|default=: File containing acquisition parameters - in_bvec: medimage/bvec - # type=file|default=: File containing the b-vectors for all volumes in --imain - in_bval: medimage/bval - # type=file|default=: File containing the b-values for all volumes in --imain - session: generic/file - # type=file|default=: File containing session indices for all volumes in --imain + in_mask: generic/file + # type=file|default=: Mask 
to indicate brain in_topup_fieldcoef: generic/file # type=file|default=: Topup results file containing the field coefficients in_topup_movpar: generic/file # type=file|default=: Topup results file containing the movement parameters (movpar.txt) - field: generic/file - # type=file|default=: Non-topup derived fieldmap scaled in Hz - field_mat: generic/file - # type=file|default=: Matrix specifying the relative positions of the fieldmap, --field, and the first volume of the input file, --imain json: generic/file # type=file|default='': Name of .json text file with information about slice timing + session: generic/file + # type=file|default=: File containing session indices for all volumes in --imain slice_order: text/text-file # type=file|default='': Name of text file completely specifying slice/group acquisition callable_defaults: @@ -95,11 +95,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
out_cnr_maps: generic/file # type=file: path/name of file with the cnr_maps out_corrected: generic/file @@ -134,245 +134,237 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: File containing all the images to estimate distortions for - in_mask: - # type=file|default=: Mask to indicate brain - in_index: - # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup - in_acqp: - # type=file|default=: File containing acquisition parameters - in_bvec: - # type=file|default=: File containing the b-vectors for all volumes in --imain - in_bval: - # type=file|default=: File containing the b-values for all volumes in --imain - out_base: - # type=str|default='eddy_corrected': Basename for output image - session: - # type=file|default=: File containing session indices for all volumes in --imain - in_topup_fieldcoef: - # type=file|default=: Topup results file containing the field coefficients - in_topup_movpar: - # type=file|default=: Topup results file containing the movement parameters (movpar.txt) - field: - # type=file|default=: Non-topup derived fieldmap scaled in Hz - field_mat: - # type=file|default=: Matrix specifying the relative positions of the fieldmap, --field, and the first volume of the input file, --imain - flm: - # type=enum|default='quadratic'|allowed['cubic','linear','quadratic']: First level EC model - slm: - # 
type=enum|default='none'|allowed['linear','none','quadratic']: Second level EC model - fep: - # type=bool|default=False: Fill empty planes in x- or y-directions - initrand: - # type=bool|default=False: Resets rand for when selecting voxels - interp: - # type=enum|default='spline'|allowed['spline','trilinear']: Interpolation model for estimation step - nvoxhp: - # type=int|default=1000: # of voxels used to estimate the hyperparameters - fudge_factor: - # type=float|default=10.0: Fudge factor for hyperparameter error variance - dont_sep_offs_move: - # type=bool|default=False: Do NOT attempt to separate field offset from subject movement - dont_peas: - # type=bool|default=False: Do NOT perform a post-eddy alignment of shells - fwhm: - # type=float|default=0.0: FWHM for conditioning filter when estimating the parameters - niter: - # type=int|default=5: Number of iterations - method: - # type=enum|default='jac'|allowed['jac','lsr']: Final resampling method (jacobian/least squares) - repol: - # type=bool|default=False: Detect and replace outlier slices - outlier_nstd: - # type=int|default=0: Number of std off to qualify as outlier - outlier_nvox: - # type=int|default=0: Min # of voxels in a slice for inclusion in outlier detection - outlier_type: - # type=enum|default='sw'|allowed['both','gw','sw']: Type of outliers, slicewise (sw), groupwise (gw) or both (both) - outlier_pos: - # type=bool|default=False: Consider both positive and negative outliers if set - outlier_sqr: - # type=bool|default=False: Consider outliers among sums-of-squared differences if set - multiband_factor: - # type=int|default=0: Multi-band factor - multiband_offset: - # type=enum|default=0|allowed[-1,0,1]: Multi-band offset (-1 if bottom slice removed, 1 if top slice removed - mporder: - # type=int|default=0: Order of slice-to-vol movement model - slice2vol_niter: - # type=int|default=0: Number of iterations for slice-to-vol - slice2vol_lambda: - # type=int|default=0: Regularisation weight for 
slice-to-vol movement (reasonable range 1-10) - slice2vol_interp: - # type=enum|default='trilinear'|allowed['spline','trilinear']: Slice-to-vol interpolation model for estimation step - slice_order: - # type=file|default='': Name of text file completely specifying slice/group acquisition - json: - # type=file|default='': Name of .json text file with information about slice timing - estimate_move_by_susceptibility: - # type=bool|default=False: Estimate how susceptibility field changes with subject movement - mbs_niter: - # type=int|default=0: Number of iterations for MBS estimation - mbs_lambda: - # type=int|default=0: Weighting of regularisation for MBS estimation - mbs_ksp: - # type=int|default=0: Knot-spacing for MBS field estimation - num_threads: - # type=int|default=1: Number of openmp threads to use - is_shelled: - # type=bool|default=False: Override internal check to ensure that date are acquired on a set of b-value shells - use_cuda: - # type=bool|default=False: Run eddy using cuda gpu - cnr_maps: - # type=bool|default=False: Output CNR-Maps - residuals: - # type=bool|default=False: Output Residuals - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. 
Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: File containing all the images to estimate distortions for - in_index: - # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - use_cuda: "True" - # type=bool|default=False: Run eddy using cuda gpu - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - mporder: "6" - # type=int|default=0: Order of slice-to-vol movement model - slice2vol_niter: "5" - # type=int|default=0: Number of iterations for slice-to-vol - slice2vol_lambda: "1" - # type=int|default=0: Regularisation weight for slice-to-vol movement (reasonable range 1-10) - slice2vol_interp: '"trilinear"' - # type=enum|default='trilinear'|allowed['spline','trilinear']: Slice-to-vol interpolation model for estimation step - slice_order: - # type=file|default='': Name of text file completely specifying slice/group acquisition - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: File containing all the images to estimate distortions for + in_mask: + # type=file|default=: Mask to indicate brain + in_index: + # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup + in_acqp: + # type=file|default=: File containing acquisition parameters + in_bvec: + # type=file|default=: File containing the b-vectors for all volumes in --imain + in_bval: + # type=file|default=: File containing the b-values for all volumes in --imain + out_base: + # type=str|default='eddy_corrected': Basename for output image + session: + # type=file|default=: File containing session indices for all volumes in --imain + in_topup_fieldcoef: + # type=file|default=: Topup results file containing the field coefficients + in_topup_movpar: + # type=file|default=: Topup results file containing the movement parameters (movpar.txt) + field: + # type=file|default=: Non-topup derived fieldmap scaled in Hz + field_mat: + # type=file|default=: Matrix specifying the relative positions of the fieldmap, --field, and the first volume of the input file, --imain + flm: + # type=enum|default='quadratic'|allowed['cubic','linear','quadratic']: First level EC model + slm: + # type=enum|default='none'|allowed['linear','none','quadratic']: Second level EC model + fep: + # type=bool|default=False: Fill empty planes in x- or y-directions + initrand: + # type=bool|default=False: Resets rand for when selecting voxels + interp: + # type=enum|default='spline'|allowed['spline','trilinear']: Interpolation model for estimation step + nvoxhp: + # type=int|default=1000: # of voxels used to estimate the hyperparameters + fudge_factor: + # type=float|default=10.0: Fudge factor for hyperparameter error 
variance + dont_sep_offs_move: + # type=bool|default=False: Do NOT attempt to separate field offset from subject movement + dont_peas: + # type=bool|default=False: Do NOT perform a post-eddy alignment of shells + fwhm: + # type=float|default=0.0: FWHM for conditioning filter when estimating the parameters + niter: + # type=int|default=5: Number of iterations + method: + # type=enum|default='jac'|allowed['jac','lsr']: Final resampling method (jacobian/least squares) + repol: + # type=bool|default=False: Detect and replace outlier slices + outlier_nstd: + # type=int|default=0: Number of std off to qualify as outlier + outlier_nvox: + # type=int|default=0: Min # of voxels in a slice for inclusion in outlier detection + outlier_type: + # type=enum|default='sw'|allowed['both','gw','sw']: Type of outliers, slicewise (sw), groupwise (gw) or both (both) + outlier_pos: + # type=bool|default=False: Consider both positive and negative outliers if set + outlier_sqr: + # type=bool|default=False: Consider outliers among sums-of-squared differences if set + multiband_factor: + # type=int|default=0: Multi-band factor + multiband_offset: + # type=enum|default=0|allowed[-1,0,1]: Multi-band offset (-1 if bottom slice removed, 1 if top slice removed + mporder: + # type=int|default=0: Order of slice-to-vol movement model + slice2vol_niter: + # type=int|default=0: Number of iterations for slice-to-vol + slice2vol_lambda: + # type=int|default=0: Regularisation weight for slice-to-vol movement (reasonable range 1-10) + slice2vol_interp: + # type=enum|default='trilinear'|allowed['spline','trilinear']: Slice-to-vol interpolation model for estimation step + slice_order: + # type=file|default='': Name of text file completely specifying slice/group acquisition + json: + # type=file|default='': Name of .json text file with information about slice timing + estimate_move_by_susceptibility: + # type=bool|default=False: Estimate how susceptibility field changes with subject movement + mbs_niter: + 
# type=int|default=0: Number of iterations for MBS estimation + mbs_lambda: + # type=int|default=0: Weighting of regularisation for MBS estimation + mbs_ksp: + # type=int|default=0: Knot-spacing for MBS field estimation + num_threads: + # type=int|default=1: Number of openmp threads to use + is_shelled: + # type=bool|default=False: Override internal check to ensure that date are acquired on a set of b-value shells + use_cuda: + # type=bool|default=False: Run eddy using cuda gpu + cnr_maps: + # type=bool|default=False: Output CNR-Maps + residuals: + # type=bool|default=False: Output Residuals + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: File containing all the images to estimate distortions for + in_index: + # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + use_cuda: 'True' + # type=bool|default=False: Run eddy using cuda gpu + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + mporder: '6' + # type=int|default=0: Order of slice-to-vol movement model + slice2vol_lambda: '1' + # type=int|default=0: Regularisation weight for slice-to-vol movement (reasonable range 1-10) + slice_order: + # type=file|default='': Name of text file completely specifying slice/group acquisition + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: - - cmdline: eddy_openmp --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. 
- in_file: '"epi.nii"' - # type=file|default=: File containing all the images to estimate distortions for - in_index: '"epi_index.txt"' - # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS - - cmdline: eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. - use_cuda: "True" - # type=bool|default=False: Run eddy using cuda gpu - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS - - cmdline: eddy_cuda --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --mporder=6 --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --s2v_interp=trilinear --s2v_lambda=1 --s2v_niter=5 --slspec=epi_slspec.txt --slm=none - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. 
- # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. - mporder: "6" - # type=int|default=0: Order of slice-to-vol movement model - slice2vol_niter: "5" - # type=int|default=0: Number of iterations for slice-to-vol - slice2vol_lambda: "1" - # type=int|default=0: Regularisation weight for slice-to-vol movement (reasonable range 1-10) - slice2vol_interp: '"trilinear"' - # type=enum|default='trilinear'|allowed['spline','trilinear']: Slice-to-vol interpolation model for estimation step - slice_order: '"epi_slspec.txt"' - # type=file|default='': Name of text file completely specifying slice/group acquisition - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: eddy_openmp --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. 
+ in_file: '"epi.nii"' + # type=file|default=: File containing all the images to estimate distortions for + in_index: '"epi_index.txt"' + # type=file|default=: File containing indices for all volumes in --imain into --acqp and --topup + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. + use_cuda: 'True' + # type=bool|default=False: Run eddy using cuda gpu + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. 
+ mporder: '6' + # type=int|default=0: Order of slice-to-vol movement model + slice2vol_lambda: '1' + # type=int|default=0: Regularisation weight for slice-to-vol movement (reasonable range 1-10) + slice_order: '"epi_slspec.txt"' + # type=file|default='': Name of text file completely specifying slice/group acquisition + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/eddy_callables.py b/nipype-auto-conv/specs/interfaces/eddy_callables.py deleted file mode 100644 index 1fc233f..0000000 --- a/nipype-auto-conv/specs/interfaces/eddy_callables.py +++ /dev/null @@ -1,185 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Eddy.yaml""" - -import attrs -import os - - -def out_cnr_maps_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_cnr_maps"] - - -def out_corrected_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_corrected"] - - -def out_movement_over_time_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_movement_over_time"] - - -def out_movement_rms_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_movement_rms"] - - -def out_outlier_free_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, 
stderr=stderr - ) - return outputs["out_outlier_free"] - - -def out_outlier_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_outlier_map"] - - -def out_outlier_n_sqr_stdev_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_outlier_n_sqr_stdev_map"] - - -def out_outlier_n_stdev_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_outlier_n_stdev_map"] - - -def out_outlier_report_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_outlier_report"] - - -def out_parameter_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_parameter"] - - -def out_residuals_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_residuals"] - - -def out_restricted_movement_rms_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_restricted_movement_rms"] - - -def out_rotated_bvecs_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_rotated_bvecs"] - - -def out_shell_alignment_parameters_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return 
outputs["out_shell_alignment_parameters"] - - -def out_shell_pe_translation_parameters_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_shell_pe_translation_parameters"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L1008 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % inputs.out_base) - outputs["out_parameter"] = os.path.abspath("%s.eddy_parameters" % inputs.out_base) - - # File generation might depend on the version of EDDY - out_rotated_bvecs = os.path.abspath("%s.eddy_rotated_bvecs" % inputs.out_base) - out_movement_rms = os.path.abspath("%s.eddy_movement_rms" % inputs.out_base) - out_restricted_movement_rms = os.path.abspath( - "%s.eddy_restricted_movement_rms" % inputs.out_base - ) - out_shell_alignment_parameters = os.path.abspath( - "%s.eddy_post_eddy_shell_alignment_parameters" % inputs.out_base - ) - out_shell_pe_translation_parameters = os.path.abspath( - "%s.eddy_post_eddy_shell_PE_translation_parameters" % inputs.out_base - ) - out_outlier_map = os.path.abspath("%s.eddy_outlier_map" % inputs.out_base) - out_outlier_n_stdev_map = os.path.abspath( - "%s.eddy_outlier_n_stdev_map" % inputs.out_base - ) - out_outlier_n_sqr_stdev_map = os.path.abspath( - "%s.eddy_outlier_n_sqr_stdev_map" % inputs.out_base - ) - out_outlier_report = os.path.abspath("%s.eddy_outlier_report" % inputs.out_base) - if (inputs.repol is not attrs.NOTHING) and inputs.repol: - out_outlier_free = os.path.abspath( - "%s.eddy_outlier_free_data" % inputs.out_base - ) - if os.path.exists(out_outlier_free): - outputs["out_outlier_free"] = out_outlier_free - if (inputs.mporder is not attrs.NOTHING) and 
inputs.mporder > 0: - out_movement_over_time = os.path.abspath( - "%s.eddy_movement_over_time" % inputs.out_base - ) - if os.path.exists(out_movement_over_time): - outputs["out_movement_over_time"] = out_movement_over_time - if (inputs.cnr_maps is not attrs.NOTHING) and inputs.cnr_maps: - out_cnr_maps = os.path.abspath("%s.eddy_cnr_maps.nii.gz" % inputs.out_base) - if os.path.exists(out_cnr_maps): - outputs["out_cnr_maps"] = out_cnr_maps - if (inputs.residuals is not attrs.NOTHING) and inputs.residuals: - out_residuals = os.path.abspath("%s.eddy_residuals.nii.gz" % inputs.out_base) - if os.path.exists(out_residuals): - outputs["out_residuals"] = out_residuals - - if os.path.exists(out_rotated_bvecs): - outputs["out_rotated_bvecs"] = out_rotated_bvecs - if os.path.exists(out_movement_rms): - outputs["out_movement_rms"] = out_movement_rms - if os.path.exists(out_restricted_movement_rms): - outputs["out_restricted_movement_rms"] = out_restricted_movement_rms - if os.path.exists(out_shell_alignment_parameters): - outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters - if os.path.exists(out_shell_pe_translation_parameters): - outputs["out_shell_pe_translation_parameters"] = ( - out_shell_pe_translation_parameters - ) - if os.path.exists(out_outlier_map): - outputs["out_outlier_map"] = out_outlier_map - if os.path.exists(out_outlier_n_stdev_map): - outputs["out_outlier_n_stdev_map"] = out_outlier_n_stdev_map - if os.path.exists(out_outlier_n_sqr_stdev_map): - outputs["out_outlier_n_sqr_stdev_map"] = out_outlier_n_sqr_stdev_map - if os.path.exists(out_outlier_report): - outputs["out_outlier_report"] = out_outlier_report - - return outputs diff --git a/nipype-auto-conv/specs/interfaces/eddy_correct.yaml b/nipype-auto-conv/specs/interfaces/eddy_correct.yaml index 40b9f06..8dd2dcb 100644 --- a/nipype-auto-conv/specs/interfaces/eddy_correct.yaml +++ b/nipype-auto-conv/specs/interfaces/eddy_correct.yaml @@ -7,17 +7,17 @@ # ---- # # -# .. 
warning:: Deprecated in FSL. Please use -# :class:`nipype.interfaces.fsl.epi.Eddy` instead +# .. warning:: Deprecated in FSL. Please use +# :class:`nipype.interfaces.fsl.epi.Eddy` instead # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces.fsl import EddyCorrect -# >>> eddyc = EddyCorrect(in_file='diffusion.nii', -# ... out_file="diffusion_edc.nii", ref_num=0) -# >>> eddyc.cmdline -# 'eddy_correct diffusion.nii diffusion_edc.nii 0' +# >>> from nipype.interfaces.fsl import EddyCorrect +# >>> eddyc = EddyCorrect(in_file='diffusion.nii', +# ... out_file="diffusion_edc.nii", ref_num=0) +# >>> eddyc.cmdline +# 'eddy_correct diffusion.nii diffusion_edc.nii 0' # # task_name: EddyCorrect @@ -36,8 +36,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: medimage/nifti1 # type=file|default=: 4D input file - out_file: Path - # type=file|default=: 4D output file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -60,7 +58,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -74,13 +72,13 @@ tests: ref_num: # type=int|default=0: reference number output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -104,7 +102,7 @@ tests: ref_num: '0' # type=int|default=0: reference number imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -132,7 +130,7 @@ doctests: ref_num: '0' # type=int|default=0: reference number imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/eddy_correct_callables.py b/nipype-auto-conv/specs/interfaces/eddy_correct_callables.py deleted file mode 100644 index 209f413..0000000 --- a/nipype-auto-conv/specs/interfaces/eddy_correct_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of EddyCorrect.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def eddy_corrected_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["eddy_corrected"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' 
trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of 
/interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original 
source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/eddy_quad.yaml b/nipype-auto-conv/specs/interfaces/eddy_quad.yaml index a90ca6e..7e59f40 100644 --- a/nipype-auto-conv/specs/interfaces/eddy_quad.yaml +++ b/nipype-auto-conv/specs/interfaces/eddy_quad.yaml @@ -6,27 +6,27 @@ # Docs # ---- # -# Interface for FSL eddy_quad, a tool for generating single subject reports -# and storing the quality assessment indices for each subject. -# `User guide `__ +# Interface for FSL eddy_quad, a tool for generating single subject reports +# and storing the quality assessment indices for each subject. 
+# `User guide `__ # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import EddyQuad -# >>> quad = EddyQuad() -# >>> quad.inputs.base_name = 'eddy_corrected' -# >>> quad.inputs.idx_file = 'epi_index.txt' -# >>> quad.inputs.param_file = 'epi_acqp.txt' -# >>> quad.inputs.mask_file = 'epi_mask.nii' -# >>> quad.inputs.bval_file = 'bvals.scheme' -# >>> quad.inputs.bvec_file = 'bvecs.scheme' -# >>> quad.inputs.output_dir = 'eddy_corrected.qc' -# >>> quad.inputs.field = 'fieldmap_phase_fslprepared.nii' -# >>> quad.inputs.verbose = True -# >>> quad.cmdline -# 'eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt --verbose' -# >>> res = quad.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import EddyQuad +# >>> quad = EddyQuad() +# >>> quad.inputs.base_name = 'eddy_corrected' +# >>> quad.inputs.idx_file = 'epi_index.txt' +# >>> quad.inputs.param_file = 'epi_acqp.txt' +# >>> quad.inputs.mask_file = 'epi_mask.nii' +# >>> quad.inputs.bval_file = 'bvals.scheme' +# >>> quad.inputs.bvec_file = 'bvecs.scheme' +# >>> quad.inputs.output_dir = 'eddy_corrected.qc' +# >>> quad.inputs.field = 'fieldmap_phase_fslprepared.nii' +# >>> quad.inputs.verbose = True +# >>> quad.cmdline +# 'eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt --verbose' +# >>> res = quad.run() # doctest: +SKIP # # task_name: EddyQuad @@ -38,14 +38,14 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. - bval_file: medimage/bval + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + bval_file: generic/file # type=file|default=: b-values file - bvec_file: medimage/bvec + bvec_file: generic/file # type=file|default=: b-vectors file - only used when .eddy_residuals file is present field: generic/file # type=file|default=: TOPUP estimated field (in Hz) @@ -68,11 +68,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
avg_b0_pe_png: generic/file+list-of # type=list: Image showing mid-sagittal, -coronal and -axial slices of each averaged pe-direction b0 volume. Generated when using the -f option. avg_b_png: generic/file+list-of @@ -93,89 +93,89 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - base_name: - # type=str|default='eddy_corrected': Basename (including path) for EDDY output files, i.e., corrected images and QC files - idx_file: - # type=file|default=: File containing indices for all volumes into acquisition parameters - param_file: - # type=file|default=: File containing acquisition parameters - mask_file: - # type=file|default=: Binary mask file - bval_file: - # type=file|default=: b-values file - bvec_file: - # type=file|default=: b-vectors file - only used when .eddy_residuals file is present - output_dir: - # type=str|default='': Output directory - default = '.qc' - field: - # type=file|default=: TOPUP estimated field (in Hz) - slice_spec: - # type=file|default=: Text file specifying slice/group acquisition - verbose: - # type=bool|default=False: Display debug messages - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the 
test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - param_file: - # type=file|default=: File containing acquisition parameters - output_dir: '"eddy_corrected.qc"' - # type=str|default='': Output directory - default = '.qc' - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + base_name: + # type=str|default='eddy_corrected': Basename (including path) for EDDY output files, i.e., corrected images and QC files + idx_file: + # type=file|default=: File containing indices for all volumes into acquisition parameters + param_file: + # type=file|default=: File containing acquisition parameters + mask_file: + # type=file|default=: Binary mask file + bval_file: + # type=file|default=: b-values file + bvec_file: + # type=file|default=: b-vectors file - only used when .eddy_residuals file is present + output_dir: + # type=str|default='': Output directory - default = '.qc' + field: + # type=file|default=: TOPUP estimated field (in Hz) + slice_spec: + # type=file|default=: Text file specifying slice/group acquisition + verbose: + # type=bool|default=False: Display debug messages + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. 
Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + param_file: + # type=file|default=: File containing acquisition parameters + output_dir: '"eddy_corrected.qc"' + # type=str|default='': Output directory - default = '.qc' + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: - - cmdline: eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt --verbose - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. 
- param_file: '"epi_acqp.txt"' - # type=file|default=: File containing acquisition parameters - output_dir: '"eddy_corrected.qc"' - # type=str|default='': Output directory - default = '.qc' - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt --verbose + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. + param_file: '"epi_acqp.txt"' + # type=file|default=: File containing acquisition parameters + output_dir: '"eddy_corrected.qc"' + # type=str|default='': Output directory - default = '.qc' + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/eddy_quad_callables.py b/nipype-auto-conv/specs/interfaces/eddy_quad_callables.py deleted file mode 100644 index daf563b..0000000 --- a/nipype-auto-conv/specs/interfaces/eddy_quad_callables.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of EddyQuad.yaml""" - -import attrs -import os -from glob import glob - - -def avg_b0_pe_png_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["avg_b0_pe_png"] - - -def avg_b_png_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["avg_b_png"] - - -def clean_volumes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["clean_volumes"] - - -def cnr_png_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["cnr_png"] - - -def qc_json_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["qc_json"] - - -def qc_pdf_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["qc_pdf"] - - -def residuals_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["residuals"] - - -def vdm_png_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return 
outputs["vdm_png"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L1673 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - from glob import glob - - outputs = {} - - # If the output directory isn't defined, the interface seems to use - # the default but not set its value in `inputs.output_dir` - if inputs.output_dir is attrs.NOTHING: - out_dir = os.path.abspath(os.path.basename(inputs.base_name) + ".qc") - else: - out_dir = os.path.abspath(inputs.output_dir) - - outputs["qc_json"] = os.path.join(out_dir, "qc.json") - outputs["qc_pdf"] = os.path.join(out_dir, "qc.pdf") - - # Grab all b* files here. This will also grab the b0_pe* files - # as well, but only if the field input was provided. So we'll remove - # them later in the next conditional. - outputs["avg_b_png"] = sorted(glob(os.path.join(out_dir, "avg_b*.png"))) - - if inputs.field is not attrs.NOTHING: - outputs["avg_b0_pe_png"] = sorted(glob(os.path.join(out_dir, "avg_b0_pe*.png"))) - - # The previous glob for `avg_b_png` also grabbed the - # `avg_b0_pe_png` files so we have to remove them - # from `avg_b_png`. 
- for fname in outputs["avg_b0_pe_png"]: - outputs["avg_b_png"].remove(fname) - - outputs["vdm_png"] = os.path.join(out_dir, "vdm.png") - - outputs["cnr_png"] = sorted(glob(os.path.join(out_dir, "cnr*.png"))) - - residuals = os.path.join(out_dir, "eddy_msr.txt") - if os.path.isfile(residuals): - outputs["residuals"] = residuals - - clean_volumes = os.path.join(out_dir, "vols_no_outliers.txt") - if os.path.isfile(clean_volumes): - outputs["clean_volumes"] = clean_volumes - - return outputs diff --git a/nipype-auto-conv/specs/interfaces/epi_de_warp.yaml b/nipype-auto-conv/specs/interfaces/epi_de_warp.yaml index 55bee74..fc29922 100644 --- a/nipype-auto-conv/specs/interfaces/epi_de_warp.yaml +++ b/nipype-auto-conv/specs/interfaces/epi_de_warp.yaml @@ -6,24 +6,24 @@ # Docs # ---- # -# Wraps the unwarping script `epidewarp.fsl -# `_. +# Wraps the unwarping script `epidewarp.fsl +# `_. # -# .. warning:: deprecated in FSL, please use -# :func:`niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb` instead. +# .. warning:: deprecated in FSL, please use +# :func:`niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb` instead. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import EPIDeWarp -# >>> dewarp = EPIDeWarp() -# >>> dewarp.inputs.epi_file = "functional.nii" -# >>> dewarp.inputs.mag_file = "magnitude.nii" -# >>> dewarp.inputs.dph_file = "phase.nii" -# >>> dewarp.inputs.output_type = "NIFTI_GZ" -# >>> dewarp.cmdline # doctest: +ELLIPSIS -# 'epidewarp.fsl --mag magnitude.nii --dph phase.nii --epi functional.nii --esp 0.58 --exfdw .../exfdw.nii.gz --nocleanup --sigma 2 --tediff 2.46 --tmpdir .../temp --vsm .../vsm.nii.gz' -# >>> res = dewarp.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import EPIDeWarp +# >>> dewarp = EPIDeWarp() +# >>> dewarp.inputs.epi_file = "functional.nii" +# >>> dewarp.inputs.mag_file = "magnitude.nii" +# >>> dewarp.inputs.dph_file = "phase.nii" +# >>> dewarp.inputs.output_type = "NIFTI_GZ" +# >>> dewarp.cmdline # doctest: +ELLIPSIS +# 'epidewarp.fsl --mag magnitude.nii --dph phase.nii --epi functional.nii --esp 0.58 --exfdw .../exfdw.nii.gz --nocleanup --sigma 2 --tediff 2.46 --tmpdir .../temp --vsm .../vsm.nii.gz' +# >>> res = dewarp.run() # doctest: +SKIP # # # @@ -47,7 +47,7 @@ inputs: # type=file|default=: EPI volume to unwarp exf_file: generic/file # type=file|default=: example func volume (or use epi) - mag_file: medimage/nifti1 + mag_file: generic/file # type=file|default=: Magnitude file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -82,7 +82,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields exfdw: exfdw # type=file: dewarped functional volume example # type=string|default='': dewarped example func volume @@ -120,13 +120,13 @@ tests: cleanup: # 
type=bool|default=False: cleanup output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -145,14 +145,10 @@ tests: # (if not specified, will try to choose a sensible value) epi_file: # type=file|default=: EPI volume to unwarp - mag_file: - # type=file|default=: Magnitude file dph_file: # type=file|default=: Phase file assumed to be scaled from 0 to 4095 - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -175,14 +171,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
epi_file: '"functional.nii"' # type=file|default=: EPI volume to unwarp - mag_file: '"magnitude.nii"' - # type=file|default=: Magnitude file dph_file: '"phase.nii"' # type=file|default=: Phase file assumed to be scaled from 0 to 4095 - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/epi_de_warp_callables.py b/nipype-auto-conv/specs/interfaces/epi_de_warp_callables.py deleted file mode 100644 index b121f9b..0000000 --- a/nipype-auto-conv/specs/interfaces/epi_de_warp_callables.py +++ /dev/null @@ -1,424 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of EPIDeWarp.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def exfdw_default(inputs): - return _gen_filename("exfdw", inputs=inputs) - - -def tmpdir_default(inputs): - return _gen_filename("tmpdir", inputs=inputs) - - -def vsm_default(inputs): - return _gen_filename("vsm", inputs=inputs) - - -def exf_mask_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["exf_mask"] - - -def exfdw_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["exfdw"] - - -def unwarped_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["unwarped_file"] - - -def vsm_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["vsm_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1428 of /interfaces/fsl/epi.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "exfdw": - if inputs.exf_file is not attrs.NOTHING: - return _gen_fname( - inputs.exf_file, - suffix="_exfdw", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - return _gen_fname( - "exfdw", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if name == "epidw": - if inputs.epi_file is not attrs.NOTHING: - return _gen_fname( - inputs.epi_file, - suffix="_epidw", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if name == "vsm": - return _gen_fname( - "vsm", inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if name == "tmpdir": - return os.path.join(output_dir, "temp") - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. 
- (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "epidewarp.fsl" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1443 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.exfdw is attrs.NOTHING: - outputs["exfdw"] = _gen_filename( - "exfdw", inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - else: - outputs["exfdw"] = inputs.exfdw - if inputs.epi_file is not attrs.NOTHING: - if inputs.epidw is not attrs.NOTHING: - outputs["unwarped_file"] = inputs.epidw - else: - outputs["unwarped_file"] = _gen_filename( - "epidw", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if inputs.vsm is attrs.NOTHING: - outputs["vsm_file"] = _gen_filename( - "vsm", inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - else: - outputs["vsm_file"] = _gen_fname( - inputs.vsm, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if inputs.tmpdir is attrs.NOTHING: - outputs["exf_mask"] = _gen_fname( - cwd=_gen_filename("tmpdir"), - basename="maskexf", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs["exf_mask"] = _gen_fname( - cwd=inputs.tmpdir, - basename="maskexf", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, 
use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/epi_reg.yaml b/nipype-auto-conv/specs/interfaces/epi_reg.yaml index d20b581..67bbf9c 100644 --- a/nipype-auto-conv/specs/interfaces/epi_reg.yaml +++ b/nipype-auto-conv/specs/interfaces/epi_reg.yaml @@ -7,26 +7,26 @@ # ---- # # -# Runs FSL epi_reg script for simultaneous coregistration and fieldmap -# unwarping. +# Runs FSL epi_reg script for simultaneous coregistration and fieldmap +# unwarping. # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import EpiReg -# >>> epireg = EpiReg() -# >>> epireg.inputs.epi='epi.nii' -# >>> epireg.inputs.t1_head='T1.nii' -# >>> epireg.inputs.t1_brain='T1_brain.nii' -# >>> epireg.inputs.out_base='epi2struct' -# >>> epireg.inputs.fmap='fieldmap_phase_fslprepared.nii' -# >>> epireg.inputs.fmapmag='fieldmap_mag.nii' -# >>> epireg.inputs.fmapmagbrain='fieldmap_mag_brain.nii' -# >>> epireg.inputs.echospacing=0.00067 -# >>> epireg.inputs.pedir='y' -# >>> epireg.cmdline # doctest: +ELLIPSIS -# 'epi_reg --echospacing=0.000670 --fmap=fieldmap_phase_fslprepared.nii --fmapmag=fieldmap_mag.nii --fmapmagbrain=fieldmap_mag_brain.nii --noclean --pedir=y --epi=epi.nii --t1=T1.nii --t1brain=T1_brain.nii --out=epi2struct' -# >>> epireg.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import EpiReg +# >>> epireg = EpiReg() +# >>> epireg.inputs.epi='epi.nii' +# >>> epireg.inputs.t1_head='T1.nii' +# >>> epireg.inputs.t1_brain='T1_brain.nii' +# >>> epireg.inputs.out_base='epi2struct' +# >>> epireg.inputs.fmap='fieldmap_phase_fslprepared.nii' +# >>> 
epireg.inputs.fmapmag='fieldmap_mag.nii' +# >>> epireg.inputs.fmapmagbrain='fieldmap_mag_brain.nii' +# >>> epireg.inputs.echospacing=0.00067 +# >>> epireg.inputs.pedir='y' +# >>> epireg.cmdline # doctest: +ELLIPSIS +# 'epi_reg --echospacing=0.000670 --fmap=fieldmap_phase_fslprepared.nii --fmapmag=fieldmap_mag.nii --fmapmagbrain=fieldmap_mag_brain.nii --noclean --pedir=y --epi=epi.nii --t1=T1.nii --t1brain=T1_brain.nii --out=epi2struct' +# >>> epireg.run() # doctest: +SKIP # # task_name: EpiReg @@ -47,19 +47,16 @@ inputs: # type=file|default=: EPI image fmap: medimage/nifti1 # type=file|default=: fieldmap image (in rad/s) - fmapmag: medimage/nifti1 + fmapmag: generic/file # type=file|default=: fieldmap magnitude image - wholehead fmapmagbrain: medimage/nifti1 # type=file|default=: fieldmap magnitude image - brain extracted t1_brain: medimage/nifti1 # type=file|default=: brain extracted T1 image - t1_head: medimage/nifti1 + t1_head: generic/file # type=file|default=: wholehead T1 image weight_image: generic/file # type=file|default=: weighting image (in T1 space) - wmseg: Path - # type=file: white matter segmentation used in flirt bbr - # type=file|default=: white matter segmentation of T1 image, has to be named like the t1brain and end on _wmseg callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -109,7 +106,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -144,13 +141,13 @@ tests: no_clean: # type=bool|default=True: do not clean up intermediate 
files output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -169,24 +166,16 @@ tests: # (if not specified, will try to choose a sensible value) epi: # type=file|default=: EPI image - t1_head: - # type=file|default=: wholehead T1 image t1_brain: # type=file|default=: brain extracted T1 image - out_base: '"epi2struct"' - # type=string|default='epi2struct': output base name fmap: # type=file|default=: fieldmap image (in rad/s) - fmapmag: - # type=file|default=: fieldmap magnitude image - wholehead fmapmagbrain: # type=file|default=: fieldmap magnitude image - brain extracted - echospacing: '0.00067' - # type=float|default=0.0: Effective EPI echo spacing (sometimes called dwell time) - in seconds pedir: '"y"' # type=enum|default='x'|allowed['-x','-y','-z','x','y','z']: phase encoding direction, dir = x/y/z/-x/-y/-z imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -209,24 +198,16 @@ doctests: # 
'.mock()' method of the corresponding class is used instead. epi: '"epi.nii"' # type=file|default=: EPI image - t1_head: '"T1.nii"' - # type=file|default=: wholehead T1 image t1_brain: '"T1_brain.nii"' # type=file|default=: brain extracted T1 image - out_base: '"epi2struct"' - # type=string|default='epi2struct': output base name fmap: '"fieldmap_phase_fslprepared.nii"' # type=file|default=: fieldmap image (in rad/s) - fmapmag: '"fieldmap_mag.nii"' - # type=file|default=: fieldmap magnitude image - wholehead fmapmagbrain: '"fieldmap_mag_brain.nii"' # type=file|default=: fieldmap magnitude image - brain extracted - echospacing: '0.00067' - # type=float|default=0.0: Effective EPI echo spacing (sometimes called dwell time) - in seconds pedir: '"y"' # type=enum|default='x'|allowed['-x','-y','-z','x','y','z']: phase encoding direction, dir = x/y/z/-x/-y/-z imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/epi_reg_callables.py b/nipype-auto-conv/specs/interfaces/epi_reg_callables.py deleted file mode 100644 index 9acf06e..0000000 --- a/nipype-auto-conv/specs/interfaces/epi_reg_callables.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of EpiReg.yaml""" - -import attrs -import os - - -def epi2str_inv_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["epi2str_inv"] - - -def epi2str_mat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["epi2str_mat"] - - -def fmap2epi_mat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmap2epi_mat"] - - -def fmap2str_mat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmap2str_mat"] - - -def fmap_epi_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmap_epi"] - - -def fmap_str_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmap_str"] - - -def fmapmag_str_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmapmag_str"] - - -def fullwarp_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fullwarp"] - - -def 
out_1vol_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_1vol"] - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def seg_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["seg"] - - -def shiftmap_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["shiftmap"] - - -def wmedge_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["wmedge"] - - -def wmseg_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["wmseg"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L1271 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = os.path.join(output_dir, inputs.out_base + ".nii.gz") - if not ((inputs.no_fmapreg is not attrs.NOTHING) and inputs.no_fmapreg) and ( - inputs.fmap is not attrs.NOTHING - ): - outputs["out_1vol"] = os.path.join(output_dir, inputs.out_base + "_1vol.nii.gz") - outputs["fmap2str_mat"] = os.path.join( - output_dir, inputs.out_base + "_fieldmap2str.mat" - ) - outputs["fmap2epi_mat"] = os.path.join( - output_dir, inputs.out_base + "_fieldmaprads2epi.mat" - ) - outputs["fmap_epi"] = os.path.join( - output_dir, inputs.out_base + 
"_fieldmaprads2epi.nii.gz" - ) - outputs["fmap_str"] = os.path.join( - output_dir, inputs.out_base + "_fieldmaprads2str.nii.gz" - ) - outputs["fmapmag_str"] = os.path.join( - output_dir, inputs.out_base + "_fieldmap2str.nii.gz" - ) - outputs["shiftmap"] = os.path.join( - output_dir, inputs.out_base + "_fieldmaprads2epi_shift.nii.gz" - ) - outputs["fullwarp"] = os.path.join(output_dir, inputs.out_base + "_warp.nii.gz") - outputs["epi2str_inv"] = os.path.join(output_dir, inputs.out_base + "_inv.mat") - if inputs.wmseg is attrs.NOTHING: - outputs["wmedge"] = os.path.join( - output_dir, inputs.out_base + "_fast_wmedge.nii.gz" - ) - outputs["wmseg"] = os.path.join( - output_dir, inputs.out_base + "_fast_wmseg.nii.gz" - ) - outputs["seg"] = os.path.join(output_dir, inputs.out_base + "_fast_seg.nii.gz") - outputs["epi2str_mat"] = os.path.join(output_dir, inputs.out_base + ".mat") - return outputs diff --git a/nipype-auto-conv/specs/interfaces/erode_image.yaml b/nipype-auto-conv/specs/interfaces/erode_image.yaml index 5ae2aac..26a693f 100644 --- a/nipype-auto-conv/specs/interfaces/erode_image.yaml +++ b/nipype-auto-conv/specs/interfaces/erode_image.yaml @@ -24,9 +24,6 @@ inputs: # type=file|default=: image to operate on kernel_file: generic/file # type=file|default=: use external file for kernel - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -50,7 +47,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # 
type=file|default=: image to write @@ -80,13 +77,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/erode_image_callables.py b/nipype-auto-conv/specs/interfaces/erode_image_callables.py deleted file mode 100644 index da319d0..0000000 --- a/nipype-auto-conv/specs/interfaces/erode_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ErodeImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - 
)["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : 
string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/extract_roi.yaml b/nipype-auto-conv/specs/interfaces/extract_roi.yaml index ae4c2c2..4328257 100644 --- a/nipype-auto-conv/specs/interfaces/extract_roi.yaml +++ b/nipype-auto-conv/specs/interfaces/extract_roi.yaml @@ -6,26 +6,26 @@ # Docs # ---- # Uses FSL Fslroi command to extract region of interest (ROI) -# from an image. +# from an image. # -# You can a) take a 3D ROI from a 3D data set (or if it is 4D, the -# same ROI is taken from each time point and a new 4D data set is -# created), b) extract just some time points from a 4D data set, or -# c) control time and space limits to the ROI. Note that the -# arguments are minimum index and size (not maximum index). So to -# extract voxels 10 to 12 inclusive you would specify 10 and 3 (not -# 10 and 12). +# You can a) take a 3D ROI from a 3D data set (or if it is 4D, the +# same ROI is taken from each time point and a new 4D data set is +# created), b) extract just some time points from a 4D data set, or +# c) control time and space limits to the ROI. Note that the +# arguments are minimum index and size (not maximum index). So to +# extract voxels 10 to 12 inclusive you would specify 10 and 3 (not +# 10 and 12). # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import ExtractROI -# >>> from nipype.testing import anatfile -# >>> fslroi = ExtractROI(in_file=anatfile, roi_file='bar.nii', t_min=0, -# ... 
t_size=1) -# >>> fslroi.cmdline == 'fslroi %s bar.nii 0 1' % anatfile -# True +# >>> from nipype.interfaces.fsl import ExtractROI +# >>> from nipype.testing import anatfile +# >>> fslroi = ExtractROI(in_file=anatfile, roi_file='bar.nii', t_min=0, +# ... t_size=1) +# >>> fslroi.cmdline == 'fslroi %s bar.nii 0 1' % anatfile +# True # # # @@ -45,9 +45,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: input file - roi_file: Path - # type=file: - # type=file|default=: output file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -71,7 +68,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields roi_file: '"bar.nii"' # type=file: # type=file|default=: output file @@ -105,13 +102,13 @@ tests: crop_list: # type=list|default=[]: list of two tuples specifying crop options output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will 
typically @@ -138,7 +135,7 @@ tests: t_size: '1' # type=int|default=0: imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: anatfile @@ -172,7 +169,7 @@ doctests: t_size: '1' # type=int|default=0: imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/extract_roi_callables.py b/nipype-auto-conv/specs/interfaces/extract_roi_callables.py deleted file mode 100644 index 7ca660e..0000000 --- a/nipype-auto-conv/specs/interfaces/extract_roi_callables.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ExtractROI.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def roi_file_default(inputs): - return _gen_filename("roi_file", inputs=inputs) - - -def roi_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["roi_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L513 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "roi_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - 
)[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslroi" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L489 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - """Create a Bunch which contains all possible files generated - by running the interface. Some files are always generated, others - depending on which ``inputs`` options are set. - - - Returns - ------- - - outputs : Bunch object - Bunch object containing all possible files generated by - interface object. 
- - If None, file was not generated - Else, contains path, filename of generated outputfile - - """ - outputs = {} - outputs["roi_file"] = inputs.roi_file - if outputs["roi_file"] is attrs.NOTHING: - outputs["roi_file"] = _gen_fname( - inputs.in_file, - suffix="_roi", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["roi_file"] = os.path.abspath(outputs["roi_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/fast.yaml b/nipype-auto-conv/specs/interfaces/fast.yaml index 37d0d67..6bf521a 100644 --- a/nipype-auto-conv/specs/interfaces/fast.yaml +++ b/nipype-auto-conv/specs/interfaces/fast.yaml @@ -7,18 +7,18 @@ # ---- # FSL FAST wrapper for segmentation and bias correction # -# For complete details, see the `FAST Documentation. -# `_ +# For complete details, see the `FAST Documentation. +# `_ # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> fast = fsl.FAST() -# >>> fast.inputs.in_files = 'structural.nii' -# >>> fast.inputs.out_basename = 'fast_' -# >>> fast.cmdline -# 'fast -o fast_ -S 1 structural.nii' -# >>> out = fast.run() # doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> fast = fsl.FAST() +# >>> fast.inputs.in_files = 'structural.nii' +# >>> fast.inputs.out_basename = 'fast_' +# >>> fast.cmdline +# 'fast -o fast_ -S 1 structural.nii' +# >>> out = fast.run() # doctest: +SKIP # # task_name: FAST @@ -43,8 +43,6 @@ inputs: # type=file|default=: Filename containing intensities other_priors: generic/file+list-of # type=inputmultiobject|default=[]: alternative prior images - out_basename: Path - # type=file|default=: base name of output files callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -82,7 +80,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output 
fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -135,13 +133,13 @@ tests: # type=outputmultiobject: # type=bool|default=False: outputs individual probability maps output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -160,10 +158,8 @@ tests: # (if not specified, will try to choose a sensible value) in_files: # type=inputmultiobject|default=[]: image, or multi-channel set of images, to be segmented - out_basename: '"fast_"' - # type=file|default=: base name of output files imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -178,7 +174,7 @@ tests: # bool - whether the unittest is expected to fail or not. 
Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fast -o fast_ -S 1 structural.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -186,10 +182,8 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_files: '"structural.nii"' # type=inputmultiobject|default=[]: image, or multi-channel set of images, to be segmented - out_basename: '"fast_"' - # type=file|default=: base name of output files imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/fast_callables.py b/nipype-auto-conv/specs/interfaces/fast_callables.py deleted file mode 100644 index 490163d..0000000 --- a/nipype-auto-conv/specs/interfaces/fast_callables.py +++ /dev/null @@ -1,496 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FAST.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def bias_field_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["bias_field"] - - -def mixeltype_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mixeltype"] - - -def partial_volume_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr 
- ) - return outputs["partial_volume_files"] - - -def partial_volume_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["partial_volume_map"] - - -def probability_maps_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["probability_maps"] - - -def restored_image_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["restored_image"] - - -def tissue_class_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tissue_class_files"] - - -def tissue_class_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tissue_class_map"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. 
(defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fast" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L401 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.number_classes is attrs.NOTHING: - nclasses = 3 - else: - nclasses = inputs.number_classes - # when using multichannel, results basename is based on last - # input filename - _gen_fname_opts = {} - if inputs.out_basename is not attrs.NOTHING: - _gen_fname_opts["basename"] = inputs.out_basename - _gen_fname_opts["cwd"] = output_dir - else: - _gen_fname_opts["basename"] = inputs.in_files[-1] - _gen_fname_opts["cwd"], _, _ = split_filename(_gen_fname_opts["basename"]) - - outputs["tissue_class_map"] = _gen_fname( - suffix="_seg", - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if inputs.segments: - outputs["tissue_class_files"] = [] - for i in range(nclasses): - outputs["tissue_class_files"].append( - _gen_fname( - suffix="_seg_%d" % i, - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if inputs.output_biascorrected is not attrs.NOTHING: - outputs["restored_image"] = [] - if len(inputs.in_files) > 1: - # for multi-image segmentation there is one corrected image - # per input - for val, f in enumerate(inputs.in_files): - # image 
numbering is 1-based - outputs["restored_image"].append( - _gen_fname( - suffix="_restore_%d" % (val + 1), - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - else: - # single image segmentation has unnumbered output image - outputs["restored_image"].append( - _gen_fname( - suffix="_restore", - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - outputs["mixeltype"] = _gen_fname( - suffix="_mixeltype", - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if not inputs.no_pve: - outputs["partial_volume_map"] = _gen_fname( - suffix="_pveseg", - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["partial_volume_files"] = [] - for i in range(nclasses): - outputs["partial_volume_files"].append( - _gen_fname( - suffix="_pve_%d" % i, - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if inputs.output_biasfield: - outputs["bias_field"] = [] - if len(inputs.in_files) > 1: - # for multi-image segmentation there is one bias field image - # per input - for val, f in enumerate(inputs.in_files): - # image numbering is 1-based - outputs["bias_field"].append( - _gen_fname( - suffix="_bias_%d" % (val + 1), - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - else: - # single image segmentation has unnumbered output image - outputs["bias_field"].append( - _gen_fname( - suffix="_bias", - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - if inputs.probability_maps: - outputs["probability_maps"] = [] - for i in range(nclasses): - outputs["probability_maps"].append( - _gen_fname( - suffix="_prob_%d" % i, - **_gen_fname_opts, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - 
return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/feat.yaml b/nipype-auto-conv/specs/interfaces/feat.yaml index 042b1c0..13c9614 100644 --- a/nipype-auto-conv/specs/interfaces/feat.yaml +++ b/nipype-auto-conv/specs/interfaces/feat.yaml @@ -44,7 +44,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -54,13 +54,13 @@ tests: fsf_file: # type=file|default=: File specifying the feat design spec file output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git 
a/nipype-auto-conv/specs/interfaces/feat_callables.py b/nipype-auto-conv/specs/interfaces/feat_callables.py deleted file mode 100644 index a28955e..0000000 --- a/nipype-auto-conv/specs/interfaces/feat_callables.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FEAT.yaml""" - -import os -from glob import glob - - -def feat_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["feat_dir"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L465 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - is_ica = False - outputs["feat_dir"] = None - with open(inputs.fsf_file, "rt") as fp: - text = fp.read() - if "set fmri(inmelodic) 1" in text: - is_ica = True - for line in text.split("\n"): - if line.find("set fmri(outputdir)") > -1: - try: - outputdir_spec = line.split('"')[-2] - if os.path.exists(outputdir_spec): - outputs["feat_dir"] = outputdir_spec - - except: - pass - if not outputs["feat_dir"]: - if is_ica: - outputs["feat_dir"] = glob(os.path.join(output_dir, "*ica"))[0] - else: - outputs["feat_dir"] = glob(os.path.join(output_dir, "*feat"))[0] - return outputs diff --git a/nipype-auto-conv/specs/interfaces/feat_model.yaml b/nipype-auto-conv/specs/interfaces/feat_model.yaml index bda2496..5f981fb 100644 --- a/nipype-auto-conv/specs/interfaces/feat_model.yaml +++ b/nipype-auto-conv/specs/interfaces/feat_model.yaml @@ -54,7 +54,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # 
dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -66,13 +66,13 @@ tests: ev_files: # type=list|default=[]: Event spec files generated by level1design output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/feat_model_callables.py b/nipype-auto-conv/specs/interfaces/feat_model_callables.py deleted file mode 100644 index bb0a4aa..0000000 --- a/nipype-auto-conv/specs/interfaces/feat_model_callables.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FEATModel.yaml""" - -import os -from glob import glob - - -def con_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["con_file"] - - -def design_cov_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_cov"] - - -def design_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, 
inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_file"] - - -def design_image_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_image"] - - -def fcon_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fcon_file"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L534 of /interfaces/fsl/model.py -def _get_design_root(infile, inputs=None, stdout=None, stderr=None, output_dir=None): - _, fname = os.path.split(infile) - return fname.split(".")[0] - - -# Original source at L538 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - # TODO: figure out file names and get rid off the globs - outputs = {} - root = _get_design_root( - simplify_list(inputs.fsf_file), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - design_file = glob(os.path.join(output_dir, "%s*.mat" % root)) - assert len(design_file) == 1, "No mat file generated by FEAT Model" - outputs["design_file"] = design_file[0] - design_image = glob(os.path.join(output_dir, "%s.png" % root)) - assert len(design_image) == 1, "No design image generated by FEAT Model" - outputs["design_image"] = design_image[0] - design_cov = glob(os.path.join(output_dir, "%s_cov.png" % root)) - assert len(design_cov) == 1, "No covariance image generated by FEAT Model" - outputs["design_cov"] = design_cov[0] - con_file = glob(os.path.join(output_dir, "%s*.con" % root)) - assert len(con_file) == 1, "No con file generated by FEAT Model" - outputs["con_file"] = con_file[0] - fcon_file = glob(os.path.join(output_dir, "%s*.fts" % root)) - if fcon_file: - 
assert len(fcon_file) == 1, "No fts file generated by FEAT Model" - outputs["fcon_file"] = fcon_file[0] - return outputs - - -# Original source at L530 of /utils/filemanip.py -def simplify_list(filelist): - """Returns a list if filelist is a list of length greater than 1, - otherwise returns the first element - """ - if len(filelist) > 1: - return filelist - else: - return filelist[0] diff --git a/nipype-auto-conv/specs/interfaces/feature_extractor.yaml b/nipype-auto-conv/specs/interfaces/feature_extractor.yaml index d994561..f34548e 100644 --- a/nipype-auto-conv/specs/interfaces/feature_extractor.yaml +++ b/nipype-auto-conv/specs/interfaces/feature_extractor.yaml @@ -6,7 +6,7 @@ # Docs # ---- # -# Extract features (for later training and/or classifying) +# Extract features (for later training and/or classifying) # task_name: FeatureExtractor nipype_name: FeatureExtractor @@ -22,9 +22,6 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- mel_ica: Path - # type=directory: Melodic output directory or directories - # type=directory|default=: Melodic output directory or directories callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -63,7 +60,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/feature_extractor_callables.py b/nipype-auto-conv/specs/interfaces/feature_extractor_callables.py deleted file mode 100644 index 9a93d53..0000000 --- a/nipype-auto-conv/specs/interfaces/feature_extractor_callables.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FeatureExtractor.yaml""" - - -def mel_ica_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mel_ica"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, 
output_dir=None): - raise NotImplementedError - - -# Original source at L161 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["mel_ica"] = inputs.mel_ica - return outputs diff --git a/nipype-auto-conv/specs/interfaces/filmgls.yaml b/nipype-auto-conv/specs/interfaces/filmgls.yaml index 74615c3..095304a 100644 --- a/nipype-auto-conv/specs/interfaces/filmgls.yaml +++ b/nipype-auto-conv/specs/interfaces/filmgls.yaml @@ -7,28 +7,28 @@ # ---- # Use FSL film_gls command to fit a design matrix to voxel timeseries # -# Examples -# -------- +# Examples +# -------- # -# Initialize with no options, assigning them when calling run: +# Initialize with no options, assigning them when calling run: # -# >>> from nipype.interfaces import fsl -# >>> fgls = fsl.FILMGLS() -# >>> res = fgls.run('in_file', 'design_file', 'thresh', rn='stats') #doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> fgls = fsl.FILMGLS() +# >>> res = fgls.run('in_file', 'design_file', 'thresh', rn='stats') #doctest: +SKIP # -# Assign options through the ``inputs`` attribute: +# Assign options through the ``inputs`` attribute: # -# >>> fgls = fsl.FILMGLS() -# >>> fgls.inputs.in_file = 'functional.nii' -# >>> fgls.inputs.design_file = 'design.mat' -# >>> fgls.inputs.threshold = 10 -# >>> fgls.inputs.results_dir = 'stats' -# >>> res = fgls.run() #doctest: +SKIP +# >>> fgls = fsl.FILMGLS() +# >>> fgls.inputs.in_file = 'functional.nii' +# >>> fgls.inputs.design_file = 'design.mat' +# >>> fgls.inputs.threshold = 10 +# >>> fgls.inputs.results_dir = 'stats' +# >>> res = fgls.run() #doctest: +SKIP # -# Specify options when creating an instance: +# Specify options when creating an instance: # -# >>> fgls = fsl.FILMGLS(in_file='functional.nii', design_file='design.mat', threshold=10, results_dir='stats') -# >>> res = fgls.run() #doctest: +SKIP +# >>> fgls = fsl.FILMGLS(in_file='functional.nii', design_file='design.mat', threshold=10, 
results_dir='stats') +# >>> res = fgls.run() #doctest: +SKIP # # task_name: FILMGLS @@ -47,17 +47,8 @@ inputs: # passed to the field in the automatically generated unittests. design_file: generic/file # type=file|default=: design matrix file - fcon_file: generic/file - # type=file|default=: contrast file containing F-contrasts in_file: generic/file # type=file|default=: input data file - results_dir: Path - # type=directory: directory storing model estimation output - # type=directory|default='results': directory to store results in - surface: generic/file - # type=file|default=: input surface for autocorr smoothing in surface-based analyses - tcon_file: generic/file - # type=file|default=: contrast file containing T-contrasts callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -74,12 +65,10 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - copes: generic/file+list-of - # type=outputmultiobject: Contrast estimates for each contrast + corrections: generic/file + # type=file: statistical corrections used within FILM modeling dof_file: generic/file # type=file: degrees of freedom - fstats: generic/file+list-of - # type=outputmultiobject: f-stat file for each contrast logfile: generic/file # type=file: FILM run logfile param_estimates: generic/file+list-of @@ -93,39 +82,23 @@ outputs: # type=file: summary of residuals, See Woolrich, et. 
al., 2001 thresholdac: generic/file # type=file: The FILM autocorrelation parameters - tstats: generic/file+list-of - # type=outputmultiobject: t-stat file for each contrast - varcopes: generic/file+list-of - # type=outputmultiobject: Variance estimates for each contrast - zfstats: generic/file+list-of - # type=outputmultiobject: z-stat file for each F contrast - zstats: generic/file+list-of - # type=outputmultiobject: z-stat file for each contrast callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - inputs: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) - threshold: - # type=float|default=-1000.0: threshold - tcon_file: - # type=file|default=: contrast file containing T-contrasts - fcon_file: - # type=file|default=: contrast file containing F-contrasts - mode: - # type=enum|default='volumetric'|allowed['surface','volumetric']: Type of analysis to be done - surface: - # type=file|default=: input surface for autocorr smoothing in surface-based analyses in_file: # type=file|default=: input data file design_file: # type=file|default=: design matrix file + threshold: + # type=range|default=1000.0: threshold smooth_autocorr: # type=bool|default=False: Smooth auto corr estimates mask_size: @@ -135,7 +108,7 @@ tests: full_data: # type=bool|default=False: output full data autocorr_estimate_only: - # type=bool|default=False: perform autocorrelation estimation only + # type=bool|default=False: perform autocorrelation estimatation only fit_armodel: # type=bool|default=False: fits 
autoregressive model - default is to use tukey with M=sqrt(numvols) tukey_window: @@ -152,13 +125,13 @@ tests: # type=directory: directory storing model estimation output # type=directory|default='results': directory to store results in output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/filmgls_callables.py b/nipype-auto-conv/specs/interfaces/filmgls_callables.py deleted file mode 100644 index 7d41802..0000000 --- a/nipype-auto-conv/specs/interfaces/filmgls_callables.py +++ /dev/null @@ -1,575 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FILMGLS.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from looseversion import LooseVersion -from pathlib import Path - - -def copes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["copes"] - - -def dof_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dof_file"] - - -def fstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fstats"] - - -def logfile_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["logfile"] - - -def param_estimates_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["param_estimates"] - - -def residual4d_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["residual4d"] - - -def results_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["results_dir"] - - -def sigmasquareds_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["sigmasquareds"] - - -def thresholdac_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["thresholdac"] - - -def tstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tstats"] - - -def varcopes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["varcopes"] - - -def zfstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["zfstats"] - - -def zstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, 
stdout=stdout, stderr=stderr - ) - return outputs["zstats"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "film_gls" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L841 of /interfaces/fsl/model.py -def _get_numcons(inputs=None, stdout=None, stderr=None, output_dir=None): - numtcons = 0 - numfcons = 0 - if inputs.tcon_file is not attrs.NOTHING: - fp = open(inputs.tcon_file, "rt") - for line in fp.readlines(): - if line.startswith("/NumContrasts"): - numtcons = int(line.split()[-1]) - break - fp.close() - if inputs.fcon_file is not attrs.NOTHING: - fp = open(inputs.fcon_file, "rt") - for line in fp.readlines(): - if line.startswith("/NumContrasts"): - numfcons = int(line.split()[-1]) - break - fp.close() - return numtcons, numfcons - - -# Original source at L827 of /interfaces/fsl/model.py -def _get_pe_files(cwd, inputs=None, stdout=None, stderr=None, output_dir=None): - files = None - if inputs.design_file is not attrs.NOTHING: - fp = open(inputs.design_file, "rt") - for line in fp.readlines(): - if line.startswith("/NumWaves"): - numpes = int(line.split()[-1]) - files = [] - for i in range(numpes): - files.append( - _gen_fname( - "pe%d.nii" % (i + 1), - cwd=cwd, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - break - fp.close() - return files - - -# Original source at L860 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - cwd = output_dir - results_dir = os.path.join(cwd, inputs.results_dir) - outputs["results_dir"] = results_dir - pe_files = _get_pe_files( - results_dir, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if pe_files: - outputs["param_estimates"] = pe_files - outputs["residual4d"] = _gen_fname( - "res4d.nii", - 
cwd=results_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["dof_file"] = os.path.join(results_dir, "dof") - outputs["sigmasquareds"] = _gen_fname( - "sigmasquareds.nii", - cwd=results_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["thresholdac"] = _gen_fname( - "threshac1.nii", - cwd=results_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if Info.version() and LooseVersion(Info.version()) < LooseVersion("5.0.7"): - outputs["corrections"] = _gen_fname( - "corrections.nii", - cwd=results_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["logfile"] = _gen_fname( - "logfile", - change_ext=False, - cwd=results_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): - pth = results_dir - numtcons, numfcons = _get_numcons( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - base_contrast = 1 - copes = [] - varcopes = [] - zstats = [] - tstats = [] - for i in range(numtcons): - copes.append( - _gen_fname( - "cope%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - varcopes.append( - _gen_fname( - "varcope%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - zstats.append( - _gen_fname( - "zstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - tstats.append( - _gen_fname( - "tstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if copes: - outputs["copes"] = copes - outputs["varcopes"] = varcopes - outputs["zstats"] = zstats - outputs["tstats"] = tstats - fstats = [] 
- zfstats = [] - for i in range(numfcons): - fstats.append( - _gen_fname( - "fstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - zfstats.append( - _gen_fname( - "zfstat%d.nii" % (base_contrast + i), - cwd=pth, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - if fstats: - outputs["fstats"] = fstats - outputs["zfstats"] = zfstats - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/filter_regressor.yaml b/nipype-auto-conv/specs/interfaces/filter_regressor.yaml index 74a9915..7726a2d 100644 --- a/nipype-auto-conv/specs/interfaces/filter_regressor.yaml +++ b/nipype-auto-conv/specs/interfaces/filter_regressor.yaml @@ -7,7 +7,7 @@ # ---- # Data de-noising by regressing out part of a design matrix # -# Uses simple OLS regression on 4D images +# Uses simple OLS regression on 4D images # task_name: FilterRegressor nipype_name: FilterRegressor @@ -29,9 +29,6 @@ inputs: # type=file|default=: input file name (4D image) mask: generic/file # type=file|default=: mask image file name - out_file: Path - # type=file: output file name for the filtered data - # type=file|default=: output file name for the filtered data callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -55,7 +52,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: output file name for the filtered data # type=file|default=: output file name for the filtered data @@ -83,13 +80,13 @@ tests: out_vnscales: # type=bool|default=False: output scaling factors for variance normalization output_type: - # 
type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/filter_regressor_callables.py b/nipype-auto-conv/specs/interfaces/filter_regressor_callables.py deleted file mode 100644 index 15ef6f2..0000000 --- a/nipype-auto-conv/specs/interfaces/filter_regressor_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FilterRegressor.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L731 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - 
change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fsl_regfilt" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L721 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if outputs["out_file"] is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix="_regfilt", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - 
Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/find_the_biggest.yaml b/nipype-auto-conv/specs/interfaces/find_the_biggest.yaml index a2d7e9c..49fbe29 100644 --- a/nipype-auto-conv/specs/interfaces/find_the_biggest.yaml +++ b/nipype-auto-conv/specs/interfaces/find_the_biggest.yaml @@ -6,19 +6,19 @@ # Docs # ---- # -# Use FSL find_the_biggest for performing hard segmentation on -# the outputs of connectivity-based thresholding in probtrack. -# For complete details, see the `FDT -# Documentation. `_ +# Use FSL find_the_biggest for performing hard segmentation on +# the outputs of connectivity-based thresholding in probtrack. +# For complete details, see the `FDT +# Documentation. `_ # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces import fsl -# >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] -# >>> fBig = fsl.FindTheBiggest(in_files=ldir, out_file='biggestSegmentation') -# >>> fBig.cmdline -# 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' +# >>> from nipype.interfaces import fsl +# >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] +# >>> fBig = fsl.FindTheBiggest(in_files=ldir, out_file='biggestSegmentation') +# >>> fBig.cmdline +# 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' # # task_name: FindTheBiggest @@ -37,9 +37,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_files: generic/file+list-of # type=list|default=[]: a list of input volumes or a singleMatrixFile - out_file: Path - # type=file: output file indexed in order of input files - # type=file|default=: file with the resulting segmentation callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -63,7 +60,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"biggestSegmentation"' # type=file: output file indexed in order of input files # type=file|default=: file with the resulting segmentation @@ -79,13 +76,13 @@ tests: # type=file: output file indexed in order of input files # type=file|default=: file with the resulting segmentation output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -108,7 +105,7 @@ tests: # type=file: output file indexed in order of input files # type=file|default=: file with the resulting segmentation imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -135,7 +132,7 @@ doctests: # type=file: output file indexed in order of input files # type=file|default=: file with the resulting segmentation imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/find_the_biggest_callables.py b/nipype-auto-conv/specs/interfaces/find_the_biggest_callables.py deleted file mode 100644 index 4bc9cf0..0000000 --- a/nipype-auto-conv/specs/interfaces/find_the_biggest_callables.py +++ /dev/null @@ -1,330 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FindTheBiggest.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1341 of /interfaces/fsl/dti.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - 
inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - else: - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "find_the_biggest" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1333 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if outputs["out_file"] is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - "biggestSegmentation", - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/first.yaml b/nipype-auto-conv/specs/interfaces/first.yaml index 057b131..2481df4 100644 --- a/nipype-auto-conv/specs/interfaces/first.yaml +++ b/nipype-auto-conv/specs/interfaces/first.yaml @@ -7,16 +7,16 @@ # ---- # FSL run_first_all wrapper for segmentation of subcortical volumes # -# http://www.fmrib.ox.ac.uk/fsl/first/index.html +# http://www.fmrib.ox.ac.uk/fsl/first/index.html # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> first = fsl.FIRST() -# >>> first.inputs.in_file = 'structural.nii' -# >>> first.inputs.out_file = 'segmented.nii' -# >>> res = first.run() #doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> first = fsl.FIRST() +# >>> first.inputs.in_file = 'structural.nii' +# >>> first.inputs.out_file = 'segmented.nii' +# >>> res = first.run() #doctest: +SKIP # # task_name: FIRST @@ -37,8 +37,6 @@ inputs: # type=file|default=: Affine matrix to use (e.g. 
img2std.mat) (does not re-run registration) in_file: generic/file # type=file|default=: input data file - out_file: Path - # type=file|default='segmented': output data file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -67,7 +65,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -93,13 +91,13 @@ tests: affine_file: # type=file|default=: Affine matrix to use (e.g. img2std.mat) (does not re-run registration) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/first_callables.py b/nipype-auto-conv/specs/interfaces/first_callables.py deleted file mode 100644 index 9f168a3..0000000 --- a/nipype-auto-conv/specs/interfaces/first_callables.py +++ /dev/null @@ -1,187 +0,0 @@ -"""Module to put any 
functions that are referred to in the "callables" section of FIRST.yaml""" - -import attrs -import os.path as op - - -def bvars_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["bvars"] - - -def original_segmentations_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["original_segmentations"] - - -def segmentation_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["segmentation_file"] - - -def vtk_surfaces_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["vtk_surfaces"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L2259 of /interfaces/fsl/preprocess.py -def _gen_fname(basename, inputs=None, stdout=None, stderr=None, output_dir=None): - path, outname, ext = split_filename(inputs.out_file) - - method = "none" - if (inputs.method is not attrs.NOTHING) and inputs.method != "none": - method = "fast" - if inputs.list_of_specific_structures and inputs.method == "auto": - method = "none" - - if inputs.method_as_numerical_threshold is not attrs.NOTHING: - thres = "%.4f" % inputs.method_as_numerical_threshold - method = thres.replace(".", "") - - if basename == "original_segmentations": - return op.abspath("%s_all_%s_origsegs.nii.gz" % (outname, method)) - if basename == "segmentation_file": - return op.abspath("%s_all_%s_firstseg.nii.gz" % (outname, method)) - - return None - - -# Original source at L2279 of /interfaces/fsl/preprocess.py -def _gen_mesh_names( - name, 
structures, inputs=None, stdout=None, stderr=None, output_dir=None -): - path, prefix, ext = split_filename(inputs.out_file) - if name == "vtk_surfaces": - vtks = list() - for struct in structures: - vtk = prefix + "-" + struct + "_first.vtk" - vtks.append(op.abspath(vtk)) - return vtks - if name == "bvars": - bvars = list() - for struct in structures: - bvar = prefix + "-" + struct + "_first.bvars" - bvars.append(op.abspath(bvar)) - return bvars - return None - - -# Original source at L2230 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - - if inputs.list_of_specific_structures is not attrs.NOTHING: - structures = inputs.list_of_specific_structures - else: - structures = [ - "L_Hipp", - "R_Hipp", - "L_Accu", - "R_Accu", - "L_Amyg", - "R_Amyg", - "L_Caud", - "R_Caud", - "L_Pall", - "R_Pall", - "L_Puta", - "R_Puta", - "L_Thal", - "R_Thal", - "BrStem", - ] - outputs["original_segmentations"] = _gen_fname( - "original_segmentations", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["segmentation_file"] = _gen_fname( - "segmentation_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["vtk_surfaces"] = _gen_mesh_names( - "vtk_surfaces", - structures, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["bvars"] = _gen_mesh_names( - "bvars", - structures, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext diff --git a/nipype-auto-conv/specs/interfaces/flameo.yaml b/nipype-auto-conv/specs/interfaces/flameo.yaml index 92e69a3..9b4cd5e 100644 --- a/nipype-auto-conv/specs/interfaces/flameo.yaml +++ b/nipype-auto-conv/specs/interfaces/flameo.yaml @@ -7,22 +7,22 @@ # ---- # Use FSL flameo command to perform higher level model fits # -# Examples -# -------- +# Examples +# -------- # -# Initialize FLAMEO with no options, assigning them when calling run: +# Initialize FLAMEO with no options, assigning them when calling run: # -# >>> from nipype.interfaces import fsl -# >>> flameo = fsl.FLAMEO() -# >>> flameo.inputs.cope_file = 'cope.nii.gz' -# >>> flameo.inputs.var_cope_file = 'varcope.nii.gz' -# >>> flameo.inputs.cov_split_file = 'cov_split.mat' -# >>> flameo.inputs.design_file = 'design.mat' -# >>> flameo.inputs.t_con_file = 'design.con' -# >>> flameo.inputs.mask_file = 'mask.nii' -# >>> flameo.inputs.run_mode = 'fe' -# >>> flameo.cmdline -# 'flameo --copefile=cope.nii.gz --covsplitfile=cov_split.mat --designfile=design.mat --ld=stats --maskfile=mask.nii --runmode=fe --tcontrastsfile=design.con 
--varcopefile=varcope.nii.gz' +# >>> from nipype.interfaces import fsl +# >>> flameo = fsl.FLAMEO() +# >>> flameo.inputs.cope_file = 'cope.nii.gz' +# >>> flameo.inputs.var_cope_file = 'varcope.nii.gz' +# >>> flameo.inputs.cov_split_file = 'cov_split.mat' +# >>> flameo.inputs.design_file = 'design.mat' +# >>> flameo.inputs.t_con_file = 'design.con' +# >>> flameo.inputs.mask_file = 'mask.nii' +# >>> flameo.inputs.run_mode = 'fe' +# >>> flameo.cmdline +# 'flameo --copefile=cope.nii.gz --covsplitfile=cov_split.mat --designfile=design.mat --ld=stats --maskfile=mask.nii --runmode=fe --tcontrastsfile=design.con --varcopefile=varcope.nii.gz' # # task_name: FLAMEO @@ -43,7 +43,7 @@ inputs: # type=file|default=: cope regressor data file cov_split_file: datascience/text-matrix # type=file|default=: ascii matrix specifying the groups the covariance is split into - design_file: datascience/text-matrix + design_file: generic/file # type=file|default=: design matrix file dof_var_cope_file: generic/file # type=file|default=: dof data file for varcope data @@ -51,11 +51,11 @@ inputs: # type=file|default=: ascii matrix specifying f-contrasts log_dir: generic/directory # type=directory|default='stats': - mask_file: medimage/nifti1 + mask_file: generic/file # type=file|default=: mask file - t_con_file: medimage-fsl/con + t_con_file: fileformats.medimage_fsl.Con # type=file|default=: ascii matrix specifying t-contrasts - var_cope_file: medimage/nifti-gz + var_cope_file: generic/file # type=file|default=: varcope weightings data file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -101,7 +101,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields 
requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -145,13 +145,13 @@ tests: log_dir: # type=directory|default='stats': output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -170,20 +170,14 @@ tests: # (if not specified, will try to choose a sensible value) cope_file: # type=file|default=: cope regressor data file - var_cope_file: - # type=file|default=: varcope weightings data file cov_split_file: # type=file|default=: ascii matrix specifying the groups the covariance is split into - design_file: - # type=file|default=: design matrix file t_con_file: # type=file|default=: ascii matrix specifying t-contrasts - mask_file: - # type=file|default=: mask file run_mode: '"fe"' # type=enum|default='fe'|allowed['fe','flame1','flame12','ols']: inference to perform imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -206,20 
+200,14 @@ doctests: # '.mock()' method of the corresponding class is used instead. cope_file: '"cope.nii.gz"' # type=file|default=: cope regressor data file - var_cope_file: '"varcope.nii.gz"' - # type=file|default=: varcope weightings data file cov_split_file: '"cov_split.mat"' # type=file|default=: ascii matrix specifying the groups the covariance is split into - design_file: '"design.mat"' - # type=file|default=: design matrix file t_con_file: '"design.con"' # type=file|default=: ascii matrix specifying t-contrasts - mask_file: '"mask.nii"' - # type=file|default=: mask file run_mode: '"fe"' # type=enum|default='fe'|allowed['fe','flame1','flame12','ols']: inference to perform imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/flameo_callables.py b/nipype-auto-conv/specs/interfaces/flameo_callables.py deleted file mode 100644 index 734d1ea..0000000 --- a/nipype-auto-conv/specs/interfaces/flameo_callables.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FLAMEO.yaml""" - -import attrs -import os -import re -from glob import glob - - -def copes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["copes"] - - -def fstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fstats"] - - -def mrefvars_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mrefvars"] - - -def pes_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["pes"] - - -def res4d_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["res4d"] - - -def stats_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["stats_dir"] - - -def tdof_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tdof"] - - -def tstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tstats"] - - -def var_copes_callable(output_dir, inputs, 
stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["var_copes"] - - -def weights_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["weights"] - - -def zfstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["zfstats"] - - -def zstats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["zstats"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L1143 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - pth = os.path.join(output_dir, inputs.log_dir) - - pes = human_order_sorted(glob(os.path.join(pth, "pe[0-9]*.*"))) - assert len(pes) >= 1, "No pe volumes generated by FSL Estimate" - outputs["pes"] = pes - - res4d = human_order_sorted(glob(os.path.join(pth, "res4d.*"))) - assert len(res4d) == 1, "No residual volume generated by FSL Estimate" - outputs["res4d"] = res4d[0] - - copes = human_order_sorted(glob(os.path.join(pth, "cope[0-9]*.*"))) - assert len(copes) >= 1, "No cope volumes generated by FSL CEstimate" - outputs["copes"] = copes - - var_copes = human_order_sorted(glob(os.path.join(pth, "varcope[0-9]*.*"))) - assert len(var_copes) >= 1, "No varcope volumes generated by FSL CEstimate" - outputs["var_copes"] = var_copes - - zstats = human_order_sorted(glob(os.path.join(pth, "zstat[0-9]*.*"))) - assert len(zstats) >= 1, "No zstat volumes generated by FSL CEstimate" - outputs["zstats"] = zstats - - if inputs.f_con_file is not attrs.NOTHING: 
- zfstats = human_order_sorted(glob(os.path.join(pth, "zfstat[0-9]*.*"))) - assert len(zfstats) >= 1, "No zfstat volumes generated by FSL CEstimate" - outputs["zfstats"] = zfstats - - fstats = human_order_sorted(glob(os.path.join(pth, "fstat[0-9]*.*"))) - assert len(fstats) >= 1, "No fstat volumes generated by FSL CEstimate" - outputs["fstats"] = fstats - - tstats = human_order_sorted(glob(os.path.join(pth, "tstat[0-9]*.*"))) - assert len(tstats) >= 1, "No tstat volumes generated by FSL CEstimate" - outputs["tstats"] = tstats - - mrefs = human_order_sorted( - glob(os.path.join(pth, "mean_random_effects_var[0-9]*.*")) - ) - assert len(mrefs) >= 1, "No mean random effects volumes generated by FLAMEO" - outputs["mrefvars"] = mrefs - - tdof = human_order_sorted(glob(os.path.join(pth, "tdof_t[0-9]*.*"))) - assert len(tdof) >= 1, "No T dof volumes generated by FLAMEO" - outputs["tdof"] = tdof - - weights = human_order_sorted(glob(os.path.join(pth, "weights[0-9]*.*"))) - assert len(weights) >= 1, "No weight volumes generated by FLAMEO" - outputs["weights"] = weights - - outputs["stats_dir"] = pth - - return outputs - - -# Original source at L19 of /utils/misc.py -def human_order_sorted(l): - """Sorts string in human order (i.e. 'stat10' will go after 'stat2')""" - - def atoi(text): - return int(text) if text.isdigit() else text - - def natural_keys(text): - if isinstance(text, tuple): - text = text[0] - return [atoi(c) for c in re.split(r"(\d+)", text)] - - return sorted(l, key=natural_keys) diff --git a/nipype-auto-conv/specs/interfaces/flirt.yaml b/nipype-auto-conv/specs/interfaces/flirt.yaml index f079021..85f6868 100644 --- a/nipype-auto-conv/specs/interfaces/flirt.yaml +++ b/nipype-auto-conv/specs/interfaces/flirt.yaml @@ -7,23 +7,23 @@ # ---- # FSL FLIRT wrapper for coregistration # -# For complete details, see the `FLIRT Documentation. -# `_ +# For complete details, see the `FLIRT Documentation. 
+# `_ # -# To print out the command line help, use: -# fsl.FLIRT().inputs_help() +# To print out the command line help, use: +# fsl.FLIRT().inputs_help() # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> flt = fsl.FLIRT(bins=640, cost_func='mutualinfo') -# >>> flt.inputs.in_file = 'structural.nii' -# >>> flt.inputs.reference = 'mni.nii' -# >>> flt.inputs.output_type = "NIFTI_GZ" -# >>> flt.cmdline # doctest: +ELLIPSIS -# 'flirt -in structural.nii -ref mni.nii -out structural_flirt.nii.gz -omat structural_flirt.mat -bins 640 -searchcost mutualinfo' -# >>> res = flt.run() #doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> flt = fsl.FLIRT(bins=640, cost_func='mutualinfo') +# >>> flt.inputs.in_file = 'structural.nii' +# >>> flt.inputs.reference = 'mni.nii' +# >>> flt.inputs.output_type = "NIFTI_GZ" +# >>> flt.cmdline # doctest: +ELLIPSIS +# 'flirt -in structural.nii -ref mni.nii -out structural_flirt.nii.gz -omat structural_flirt.mat -bins 640 -searchcost mutualinfo' +# >>> res = flt.run() #doctest: +SKIP # # task_name: FLIRT @@ -50,18 +50,9 @@ inputs: # type=file|default=: input 4x4 affine matrix in_weight: generic/file # type=file|default=: File for input weighting volume - out_file: Path - # type=file: path/name of registered file (if generated) - # type=file|default=: registered output file - out_log: Path - # type=file: path/name of output log (if generated) - # type=file|default=: output log - out_matrix_file: Path - # type=file: path/name of calculated affine transform (if generated) - # type=file|default=: output affine matrix in 4x4 asciii format ref_weight: generic/file # type=file|default=: File for reference weighting volume - reference: medimage/nifti1 + reference: generic/file # type=file|default=: reference file schedule: generic/file # type=file|default=: replaces default schedule @@ -100,7 +91,7 @@ 
outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -205,13 +196,13 @@ tests: bbrslope: # type=float|default=0.0: value of bbr slope output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -230,16 +221,14 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: input file - reference: - # type=file|default=: reference file output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type bins: '640' # type=int|default=0: number of histogram bins cost_func: '"mutualinfo"' # type=enum|default='mutualinfo'|allowed['bbr','corratio','labeldiff','leastsq','mutualinfo','normcorr','normmi']: cost function imports: &id001 - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: example_data @@ -257,7 +246,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: flirt -in structural.nii -ref mni.nii -out structural_flirt.nii.gz -omat structural_flirt.mat -bins 640 -searchcost mutualinfo +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -265,16 +254,14 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"structural.nii"' # type=file|default=: input file - reference: '"mni.nii"' - # type=file|default=: reference file output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type bins: '640' # type=int|default=0: number of histogram bins cost_func: '"mutualinfo"' # type=enum|default='mutualinfo'|allowed['bbr','corratio','labeldiff','leastsq','mutualinfo','normcorr','normmi']: cost function imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/flirt_callables.py b/nipype-auto-conv/specs/interfaces/flirt_callables.py deleted file mode 100644 index cf6386c..0000000 --- a/nipype-auto-conv/specs/interfaces/flirt_callables.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FLIRT.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def out_log_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_log"] - - -def out_matrix_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_matrix_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - 
else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in 
list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = 
clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. 
" - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/fnirt.yaml b/nipype-auto-conv/specs/interfaces/fnirt.yaml index d434bec..226ddaf 100644 --- a/nipype-auto-conv/specs/interfaces/fnirt.yaml +++ b/nipype-auto-conv/specs/interfaces/fnirt.yaml @@ -7,32 +7,32 @@ # ---- # FSL FNIRT wrapper for non-linear registration # -# For complete details, see the `FNIRT Documentation. -# `_ +# For complete details, see the `FNIRT Documentation. 
+# `_ # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> fnt = fsl.FNIRT(affine_file=example_data('trans.mat')) -# >>> res = fnt.run(ref_file=example_data('mni.nii', in_file=example_data('structural.nii')) #doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> fnt = fsl.FNIRT(affine_file=example_data('trans.mat')) +# >>> res = fnt.run(ref_file=example_data('mni.nii', in_file=example_data('structural.nii')) #doctest: +SKIP # -# T1 -> Mni153 +# T1 -> Mni153 # -# >>> from nipype.interfaces import fsl -# >>> fnirt_mprage = fsl.FNIRT() -# >>> fnirt_mprage.inputs.in_fwhm = [8, 4, 2, 2] -# >>> fnirt_mprage.inputs.subsampling_scheme = [4, 2, 1, 1] +# >>> from nipype.interfaces import fsl +# >>> fnirt_mprage = fsl.FNIRT() +# >>> fnirt_mprage.inputs.in_fwhm = [8, 4, 2, 2] +# >>> fnirt_mprage.inputs.subsampling_scheme = [4, 2, 1, 1] # -# Specify the resolution of the warps +# Specify the resolution of the warps # -# >>> fnirt_mprage.inputs.warp_resolution = (6, 6, 6) -# >>> res = fnirt_mprage.run(in_file='structural.nii', ref_file='mni.nii', warped_file='warped.nii', fieldcoeff_file='fieldcoeff.nii')#doctest: +SKIP +# >>> fnirt_mprage.inputs.warp_resolution = (6, 6, 6) +# >>> res = fnirt_mprage.run(in_file='structural.nii', ref_file='mni.nii', warped_file='warped.nii', fieldcoeff_file='fieldcoeff.nii')#doctest: +SKIP # -# We can check the command line and confirm that it's what we expect. +# We can check the command line and confirm that it's what we expect. 
# -# >>> fnirt_mprage.cmdline #doctest: +SKIP -# 'fnirt --cout=fieldcoeff.nii --in=structural.nii --infwhm=8,4,2,2 --ref=mni.nii --subsamp=4,2,1,1 --warpres=6,6,6 --iout=warped.nii' +# >>> fnirt_mprage.cmdline #doctest: +SKIP +# 'fnirt --cout=fieldcoeff.nii --in=structural.nii --infwhm=8,4,2,2 --ref=mni.nii --subsamp=4,2,1,1 --warpres=6,6,6 --iout=warped.nii' # # task_name: FNIRT @@ -59,16 +59,10 @@ inputs: # type=file|default=: name of file with mask in input image space inwarp_file: generic/file # type=file|default=: name of file containing initial non-linear warps - log_file: Path - # type=file: Name of log-file - # type=file|default=: Name of log-file ref_file: generic/file # type=file|default=: name of reference image refmask_file: generic/file # type=file|default=: name of file with mask in reference space - warped_file: Path - # type=file: warped image - # type=file|default=: name of output image callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -110,7 +104,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields log_file: log_file # type=file: Name of log-file # type=file|default=: Name of log-file @@ -213,13 +207,13 @@ tests: hessian_precision: # type=enum|default='double'|allowed['double','float']: Precision for representing Hessian, double or float. 
Default double output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -238,12 +232,10 @@ tests: # (if not specified, will try to choose a sensible value) in_fwhm: '[8, 4, 2, 2]' # type=list|default=[]: FWHM (in mm) of gaussian smoothing kernel for input volume, default [6, 4, 2, 2] - subsampling_scheme: '[4, 2, 1, 1]' - # type=list|default=[]: sub-sampling scheme, list, default [4, 2, 1, 1] warp_resolution: (6, 6, 6) # type=tuple|default=(0, 0, 0): (approximate) resolution (in mm) of warp basis in x-, y- and z-direction, default 10, 10, 10 imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: example_data @@ -269,10 +261,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_fwhm: '[8, 4, 2, 2]' # type=list|default=[]: FWHM (in mm) of gaussian smoothing kernel for input volume, default [6, 4, 2, 2] - subsampling_scheme: '[4, 2, 1, 1] Specify the resolution of the warps >>> fnirt_mprage.inputs.warp_resolution = (6, 6, 6)' - # type=list|default=[]: sub-sampling scheme, list, default [4, 2, 1, 1] + warp_resolution: (6, 6, 6) + # type=tuple|default=(0, 0, 0): (approximate) resolution (in mm) of warp basis in x-, y- and z-direction, default 10, 10, 10 imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/fnirt_callables.py b/nipype-auto-conv/specs/interfaces/fnirt_callables.py deleted file mode 100644 index 3d121a2..0000000 --- a/nipype-auto-conv/specs/interfaces/fnirt_callables.py +++ /dev/null @@ -1,420 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FNIRT.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def log_file_default(inputs): - return _gen_filename("log_file", inputs=inputs) - - -def warped_file_default(inputs): - return _gen_filename("warped_file", inputs=inputs) - - -def field_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["field_file"] - - -def fieldcoeff_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fieldcoeff_file"] - - -def jacobian_file_callable(output_dir, 
inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["jacobian_file"] - - -def log_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["log_file"] - - -def modulatedref_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["modulatedref_file"] - - -def out_intensitymap_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_intensitymap_file"] - - -def warped_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["warped_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1341 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name in ["warped_file", "log_file"]: - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. 
(defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fnirt" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1298 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - for key, suffix in list(filemap.items()): - inval = getattr(inputs, key) - change_ext = True - if key in ["warped_file", "log_file"]: - if suffix.endswith(".txt"): - change_ext = False - if inval is not attrs.NOTHING: - outputs[key] = os.path.abspath(inval) - else: - outputs[key] = _gen_fname( - inputs.in_file, - suffix="_" + suffix, - change_ext=change_ext, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif inval is not attrs.NOTHING: - if isinstance(inval, bool): - if inval: - outputs[key] = _gen_fname( - inputs.in_file, - suffix="_" + suffix, - change_ext=change_ext, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs[key] = os.path.abspath(inval) - - if key == "out_intensitymap_file" and (outputs[key] is not attrs.NOTHING): - basename = intensitymap_file_basename( - outputs[key], - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs[key] = [outputs[key], "%s.txt" % basename] - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - 
"""Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L1364 of /interfaces/fsl/preprocess.py -def intensitymap_file_basename( - f, inputs=None, stdout=None, stderr=None, output_dir=None -): - """Removes valid intensitymap extensions from `f`, returning a basename - that can refer to both intensitymap files. - """ - for ext in list(Info.ftypes.values()) + [".txt"]: - if f.endswith(ext): - return f[: -len(ext)] - # TODO consider warning for this case - return f - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/fugue.yaml b/nipype-auto-conv/specs/interfaces/fugue.yaml index cf5bd47..d8c6380 100644 --- a/nipype-auto-conv/specs/interfaces/fugue.yaml +++ b/nipype-auto-conv/specs/interfaces/fugue.yaml @@ -7,64 +7,64 @@ # ---- # FSL FUGUE set of tools for EPI distortion correction # -# `FUGUE `_ is, most generally, -# a set of tools for EPI distortion correction. +# `FUGUE `_ is, most generally, +# a set of tools for EPI distortion correction. # -# Distortions may be corrected for -# 1. improving registration with non-distorted images (e.g. structurals), -# or -# 2. dealing with motion-dependent changes. +# Distortions may be corrected for +# 1. improving registration with non-distorted images (e.g. structurals), +# or +# 2. dealing with motion-dependent changes. # -# FUGUE is designed to deal only with the first case - -# improving registration. +# FUGUE is designed to deal only with the first case - +# improving registration. 
# # -# Examples -# -------- +# Examples +# -------- # # -# Unwarping an input image (shift map is known): +# Unwarping an input image (shift map is known): # -# >>> from nipype.interfaces.fsl.preprocess import FUGUE -# >>> fugue = FUGUE() -# >>> fugue.inputs.in_file = 'epi.nii' -# >>> fugue.inputs.mask_file = 'epi_mask.nii' -# >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well -# >>> fugue.inputs.unwarp_direction = 'y' -# >>> fugue.inputs.output_type = "NIFTI_GZ" -# >>> fugue.cmdline # doctest: +ELLIPSIS -# 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --unwarp=epi_unwarped.nii.gz' -# >>> fugue.run() #doctest: +SKIP +# >>> from nipype.interfaces.fsl.preprocess import FUGUE +# >>> fugue = FUGUE() +# >>> fugue.inputs.in_file = 'epi.nii' +# >>> fugue.inputs.mask_file = 'epi_mask.nii' +# >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well +# >>> fugue.inputs.unwarp_direction = 'y' +# >>> fugue.inputs.output_type = "NIFTI_GZ" +# >>> fugue.cmdline # doctest: +ELLIPSIS +# 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --unwarp=epi_unwarped.nii.gz' +# >>> fugue.run() #doctest: +SKIP # # -# Warping an input image (shift map is known): +# Warping an input image (shift map is known): # -# >>> from nipype.interfaces.fsl.preprocess import FUGUE -# >>> fugue = FUGUE() -# >>> fugue.inputs.in_file = 'epi.nii' -# >>> fugue.inputs.forward_warping = True -# >>> fugue.inputs.mask_file = 'epi_mask.nii' -# >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well -# >>> fugue.inputs.unwarp_direction = 'y' -# >>> fugue.inputs.output_type = "NIFTI_GZ" -# >>> fugue.cmdline # doctest: +ELLIPSIS -# 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --warp=epi_warped.nii.gz' -# >>> fugue.run() #doctest: +SKIP +# >>> from nipype.interfaces.fsl.preprocess import FUGUE +# >>> fugue = FUGUE() +# >>> fugue.inputs.in_file = 
'epi.nii' +# >>> fugue.inputs.forward_warping = True +# >>> fugue.inputs.mask_file = 'epi_mask.nii' +# >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well +# >>> fugue.inputs.unwarp_direction = 'y' +# >>> fugue.inputs.output_type = "NIFTI_GZ" +# >>> fugue.cmdline # doctest: +ELLIPSIS +# 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --warp=epi_warped.nii.gz' +# >>> fugue.run() #doctest: +SKIP # # -# Computing the vsm (unwrapped phase map is known): +# Computing the vsm (unwrapped phase map is known): # -# >>> from nipype.interfaces.fsl.preprocess import FUGUE -# >>> fugue = FUGUE() -# >>> fugue.inputs.phasemap_in_file = 'epi_phasediff.nii' -# >>> fugue.inputs.mask_file = 'epi_mask.nii' -# >>> fugue.inputs.dwell_to_asym_ratio = (0.77e-3 * 3) / 2.46e-3 -# >>> fugue.inputs.unwarp_direction = 'y' -# >>> fugue.inputs.save_shift = True -# >>> fugue.inputs.output_type = "NIFTI_GZ" -# >>> fugue.cmdline # doctest: +ELLIPSIS -# 'fugue --dwelltoasym=0.9390243902 --mask=epi_mask.nii --phasemap=epi_phasediff.nii --saveshift=epi_phasediff_vsm.nii.gz --unwarpdir=y' -# >>> fugue.run() #doctest: +SKIP +# >>> from nipype.interfaces.fsl.preprocess import FUGUE +# >>> fugue = FUGUE() +# >>> fugue.inputs.phasemap_in_file = 'epi_phasediff.nii' +# >>> fugue.inputs.mask_file = 'epi_mask.nii' +# >>> fugue.inputs.dwell_to_asym_ratio = (0.77e-3 * 3) / 2.46e-3 +# >>> fugue.inputs.unwarp_direction = 'y' +# >>> fugue.inputs.save_shift = True +# >>> fugue.inputs.output_type = "NIFTI_GZ" +# >>> fugue.cmdline # doctest: +ELLIPSIS +# 'fugue --dwelltoasym=0.9390243902 --mask=epi_mask.nii --phasemap=epi_phasediff.nii --saveshift=epi_phasediff_vsm.nii.gz --unwarpdir=y' +# >>> fugue.run() #doctest: +SKIP # # # @@ -84,9 +84,6 @@ inputs: # passed to the field in the automatically generated unittests. 
fmap_in_file: generic/file # type=file|default=: filename for loading fieldmap (rad/s) - fmap_out_file: Path - # type=file: fieldmap file - # type=file|default=: filename for saving fieldmap (rad/s) in_file: medimage/nifti1 # type=file|default=: filename of input volume mask_file: medimage/nifti1 @@ -95,15 +92,6 @@ inputs: # type=file|default=: filename for input phase image shift_in_file: medimage/nifti1 # type=file|default=: filename for reading pixel shift volume - shift_out_file: Path - # type=file: voxel shift map file - # type=file|default=: filename for saving pixel shift volume - unwarped_file: Path - # type=file: unwarped file - # type=file|default=: apply unwarping and save as filename - warped_file: Path - # type=file: forward warped file - # type=file|default=: apply forward warping and save as filename callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -136,7 +124,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -212,13 +200,13 @@ tests: save_unmasked_fmap: # type=bool|default=False: saves the unmasked fieldmap when using --savefmap output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - 
list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -237,16 +225,12 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: filename of input volume - mask_file: - # type=file|default=: filename for loading valid mask shift_in_file: # type=file|default=: filename for reading pixel shift volume - unwarp_direction: '"y"' - # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -265,18 +249,12 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: filename of input volume - forward_warping: 'True' - # type=bool|default=False: apply forward warping instead of unwarping mask_file: # type=file|default=: filename for loading valid mask - shift_in_file: - # type=file|default=: filename for reading pixel shift volume unwarp_direction: '"y"' # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) - output_type: '"NIFTI_GZ"' - # 
type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -295,18 +273,12 @@ tests: # (if not specified, will try to choose a sensible value) phasemap_in_file: # type=file|default=: filename for input phase image - mask_file: - # type=file|default=: filename for loading valid mask dwell_to_asym_ratio: (0.77e-3 * 3) / 2.46e-3 # type=float|default=0.0: set the dwell to asym time ratio - unwarp_direction: '"y"' - # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) save_shift: 'True' # type=bool|default=False: write pixel shift volume - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -329,16 +301,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_file: '"epi.nii"' # type=file|default=: filename of input volume - mask_file: '"epi_mask.nii"' - # type=file|default=: filename for loading valid mask shift_in_file: '"vsm.nii" # Previously computed with fugue as well' # type=file|default=: filename for reading pixel shift volume - unwarp_direction: '"y"' - # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS @@ -350,18 +318,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_file: '"epi.nii"' # type=file|default=: filename of input volume - forward_warping: 'True' - # type=bool|default=False: apply forward warping instead of unwarping mask_file: '"epi_mask.nii"' # type=file|default=: filename for loading valid mask - shift_in_file: '"vsm.nii" # Previously computed with fugue as well' - # type=file|default=: filename for reading pixel shift volume unwarp_direction: '"y"' # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS @@ -373,18 +335,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
phasemap_in_file: '"epi_phasediff.nii"' # type=file|default=: filename for input phase image - mask_file: '"epi_mask.nii"' - # type=file|default=: filename for loading valid mask dwell_to_asym_ratio: (0.77e-3 * 3) / 2.46e-3 # type=float|default=0.0: set the dwell to asym time ratio - unwarp_direction: '"y"' - # type=enum|default='x'|allowed['x','x-','y','y-','z','z-']: specifies direction of warping (default y) save_shift: 'True' # type=bool|default=False: write pixel shift volume - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/fugue_callables.py b/nipype-auto-conv/specs/interfaces/fugue_callables.py deleted file mode 100644 index befea95..0000000 --- a/nipype-auto-conv/specs/interfaces/fugue_callables.py +++ /dev/null @@ -1,359 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of FUGUE.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def fmap_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fmap_out_file"] - - -def shift_out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["shift_out_file"] - - -def unwarped_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["unwarped_file"] - - -def warped_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["warped_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields 
are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def 
_list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: 
- if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/glm.yaml b/nipype-auto-conv/specs/interfaces/glm.yaml index 3668994..2c042ad 100644 --- a/nipype-auto-conv/specs/interfaces/glm.yaml +++ b/nipype-auto-conv/specs/interfaces/glm.yaml @@ -6,14 +6,14 @@ # Docs # ---- # -# FSL GLM: +# FSL GLM: # -# Example -# ------- -# >>> import nipype.interfaces.fsl as fsl -# >>> glm = fsl.GLM(in_file='functional.nii', design='maps.nii', output_type='NIFTI') -# >>> glm.cmdline -# 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' +# Example +# ------- +# >>> import nipype.interfaces.fsl as fsl +# >>> glm = fsl.GLM(in_file='functional.nii', design='maps.nii', output_type='NIFTI') +# >>> glm.cmdline +# 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' # # task_name: GLM @@ -38,32 +38,6 @@ inputs: # type=file|default=: input file name (text matrix or 3D/4D image file) 
mask: generic/file # type=file|default=: mask image file name if input is image - out_cope: Path - # type=outputmultiobject: output file name for COPEs (either as text file or image) - # type=file|default=: output file name for COPE (either as txt or image - out_data_name: Path - # type=file|default=: output file name for pre-processed data - out_f_name: Path - # type=file|default=: output file name for F-value of full model fit - out_file: Path - # type=file: file name of GLM parameters (if generated) - # type=file|default=: filename for GLM parameter estimates (GLM betas) - out_p_name: Path - # type=file|default=: output file name for p-values of Z-stats (either as text file or image) - out_pf_name: Path - # type=file|default=: output file name for p-value for full model fit - out_res_name: Path - # type=file|default=: output file name for residuals - out_sigsq_name: Path - # type=file|default=: output file name for residual noise variance sigma-square - out_t_name: Path - # type=file|default=: output file name for t-stats (either as txt or image - out_varcb_name: Path - # type=file|default=: output file name for variance of COPEs - out_vnscales_name: Path - # type=file|default=: output file name for scaling factors for variance normalisation - out_z_name: Path - # type=file|default=: output file name for Z-stats (either as txt or image callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -110,7 +84,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ 
-162,13 +136,13 @@ tests: out_vnscales_name: # type=file|default=: output file name for scaling factors for variance normalisation output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -190,9 +164,9 @@ tests: design: # type=file|default=: file name of the GLM design matrix (text time courses for temporal regression or an image file for spatial regression) output_type: '"NIFTI"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.interfaces.fsl as fsl expected_outputs: @@ -208,7 +182,7 @@ tests: # bool - whether the unittest is expected to fail or not. 
Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -219,9 +193,9 @@ doctests: design: '"maps.nii"' # type=file|default=: file name of the GLM design matrix (text time courses for temporal regression or an image file for spatial regression) output_type: '"NIFTI"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/glm_callables.py b/nipype-auto-conv/specs/interfaces/glm_callables.py deleted file mode 100644 index 3f35c49..0000000 --- a/nipype-auto-conv/specs/interfaces/glm_callables.py +++ /dev/null @@ -1,457 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of GLM.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_cope_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_cope"] - - -def out_data_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_data"] - - -def out_f_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_f"] - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def out_p_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_p"] - - -def out_pf_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_pf"] - - -def out_res_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_res"] - - -def out_sigsq_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_sigsq"] - - -def 
out_t_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_t"] - - -def out_varcb_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_varcb"] - - -def out_vnscales_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_vnscales"] - - -def out_z_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_z"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not 
isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L2511 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = nipype_interfaces_fsl__FSLCommand___list_outputs() - - if inputs.out_cope is not attrs.NOTHING: - outputs["out_cope"] = os.path.abspath(inputs.out_cope) - - if inputs.out_z_name is not attrs.NOTHING: - outputs["out_z"] = os.path.abspath(inputs.out_z_name) - - if inputs.out_t_name is not attrs.NOTHING: - outputs["out_t"] = os.path.abspath(inputs.out_t_name) - - if inputs.out_p_name is not attrs.NOTHING: - outputs["out_p"] = 
os.path.abspath(inputs.out_p_name) - - if inputs.out_f_name is not attrs.NOTHING: - outputs["out_f"] = os.path.abspath(inputs.out_f_name) - - if inputs.out_pf_name is not attrs.NOTHING: - outputs["out_pf"] = os.path.abspath(inputs.out_pf_name) - - if inputs.out_res_name is not attrs.NOTHING: - outputs["out_res"] = os.path.abspath(inputs.out_res_name) - - if inputs.out_varcb_name is not attrs.NOTHING: - outputs["out_varcb"] = os.path.abspath(inputs.out_varcb_name) - - if inputs.out_sigsq_name is not attrs.NOTHING: - outputs["out_sigsq"] = os.path.abspath(inputs.out_sigsq_name) - - if inputs.out_data_name is not attrs.NOTHING: - outputs["out_data"] = os.path.abspath(inputs.out_data_name) - - if inputs.out_vnscales_name is not attrs.NOTHING: - outputs["out_vnscales"] = os.path.abspath(inputs.out_vnscales_name) - - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L891 of /interfaces/base/core.py -def nipype_interfaces_fsl__FSLCommand___list_outputs( - inputs=None, stdout=None, stderr=None, output_dir=None -): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/ica__aroma.yaml b/nipype-auto-conv/specs/interfaces/ica__aroma.yaml index bd97c5f..427dd84 100644 --- a/nipype-auto-conv/specs/interfaces/ica__aroma.yaml +++ b/nipype-auto-conv/specs/interfaces/ica__aroma.yaml @@ -6,31 +6,31 @@ # Docs # ---- # -# Interface for the ICA_AROMA.py script. +# Interface for the ICA_AROMA.py script. # -# ICA-AROMA (i.e. 'ICA-based Automatic Removal Of Motion Artifacts') concerns -# a data-driven method to identify and remove motion-related independent -# components from fMRI data. To that end it exploits a small, but robust -# set of theoretically motivated features, preventing the need for classifier -# re-training and therefore providing direct and easy applicability. +# ICA-AROMA (i.e. 'ICA-based Automatic Removal Of Motion Artifacts') concerns +# a data-driven method to identify and remove motion-related independent +# components from fMRI data. To that end it exploits a small, but robust +# set of theoretically motivated features, preventing the need for classifier +# re-training and therefore providing direct and easy applicability. 
# -# See link for further documentation: https://github.com/rhr-pruim/ICA-AROMA +# See link for further documentation: https://github.com/rhr-pruim/ICA-AROMA # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces.fsl import ICA_AROMA -# >>> from nipype.testing import example_data -# >>> AROMA_obj = ICA_AROMA() -# >>> AROMA_obj.inputs.in_file = 'functional.nii' -# >>> AROMA_obj.inputs.mat_file = 'func_to_struct.mat' -# >>> AROMA_obj.inputs.fnirt_warp_file = 'warpfield.nii' -# >>> AROMA_obj.inputs.motion_parameters = 'fsl_mcflirt_movpar.txt' -# >>> AROMA_obj.inputs.mask = 'mask.nii.gz' -# >>> AROMA_obj.inputs.denoise_type = 'both' -# >>> AROMA_obj.inputs.out_dir = 'ICA_testout' -# >>> AROMA_obj.cmdline # doctest: +ELLIPSIS -# 'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout' +# >>> from nipype.interfaces.fsl import ICA_AROMA +# >>> from nipype.testing import example_data +# >>> AROMA_obj = ICA_AROMA() +# >>> AROMA_obj.inputs.in_file = 'functional.nii' +# >>> AROMA_obj.inputs.mat_file = 'func_to_struct.mat' +# >>> AROMA_obj.inputs.fnirt_warp_file = 'warpfield.nii' +# >>> AROMA_obj.inputs.motion_parameters = 'fsl_mcflirt_movpar.txt' +# >>> AROMA_obj.inputs.mask = 'mask.nii.gz' +# >>> AROMA_obj.inputs.denoise_type = 'both' +# >>> AROMA_obj.inputs.out_dir = 'ICA_testout' +# >>> AROMA_obj.cmdline # doctest: +ELLIPSIS +# 'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout' # task_name: ICA_AROMA nipype_name: ICA_AROMA @@ -54,15 +54,12 @@ inputs: # type=file|default=: volume to be denoised mask: medimage/nifti-gz # type=file|default=: path/name volume mask - mat_file: datascience/text-matrix + mat_file: generic/file # type=file|default=: path/name of the mat-file describing the affine registration (e.g. 
FSL FLIRT) of the functional data to structural space (.mat file) melodic_dir: generic/directory # type=directory|default=: path to MELODIC directory if MELODIC has already been run - motion_parameters: text/text-file + motion_parameters: generic/file # type=file|default=: motion parameters file - out_dir: Path - # type=directory: directory contains (in addition to the denoised files): melodic.ica + classified_motion_components + classification_overview + feature_scores + melodic_ic_mni) - # type=directory|default='out': output directory callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -90,7 +87,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -125,7 +122,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -144,21 +141,15 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: volume to be denoised - mat_file: - # type=file|default=: path/name of the mat-file describing the affine registration (e.g. 
FSL FLIRT) of the functional data to structural space (.mat file) fnirt_warp_file: # type=file|default=: File name of the warp-file describing the non-linear registration (e.g. FSL FNIRT) of the structural data to MNI152 space (.nii.gz) - motion_parameters: - # type=file|default=: motion parameters file mask: # type=file|default=: path/name volume mask - denoise_type: '"both"' - # type=enum|default='nonaggr'|allowed['aggr','both','no','nonaggr']: Type of denoising strategy: -no: only classification, no denoising -nonaggr (default): non-aggresssive denoising, i.e. partial component regression -aggr: aggressive denoising, i.e. full component regression -both: both aggressive and non-aggressive denoising (two outputs) out_dir: '"ICA_testout"' # type=directory: directory contains (in addition to the denoised files): melodic.ica + classified_motion_components + classification_overview + feature_scores + melodic_ic_mni) # type=directory|default='out': output directory imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: example_data @@ -184,21 +175,15 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"functional.nii"' # type=file|default=: volume to be denoised - mat_file: '"func_to_struct.mat"' - # type=file|default=: path/name of the mat-file describing the affine registration (e.g. FSL FLIRT) of the functional data to structural space (.mat file) fnirt_warp_file: '"warpfield.nii"' # type=file|default=: File name of the warp-file describing the non-linear registration (e.g. 
FSL FNIRT) of the structural data to MNI152 space (.nii.gz) - motion_parameters: '"fsl_mcflirt_movpar.txt"' - # type=file|default=: motion parameters file mask: '"mask.nii.gz"' # type=file|default=: path/name volume mask - denoise_type: '"both"' - # type=enum|default='nonaggr'|allowed['aggr','both','no','nonaggr']: Type of denoising strategy: -no: only classification, no denoising -nonaggr (default): non-aggresssive denoising, i.e. partial component regression -aggr: aggressive denoising, i.e. full component regression -both: both aggressive and non-aggressive denoising (two outputs) out_dir: '"ICA_testout"' # type=directory: directory contains (in addition to the denoised files): melodic.ica + classified_motion_components + classification_overview + feature_scores + melodic_ic_mni) # type=directory|default='out': output directory imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/ica__aroma_callables.py b/nipype-auto-conv/specs/interfaces/ica__aroma_callables.py deleted file mode 100644 index 2c25633..0000000 --- a/nipype-auto-conv/specs/interfaces/ica__aroma_callables.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ICA_AROMA.yaml""" - -import os - - -def aggr_denoised_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["aggr_denoised_file"] - - -def nonaggr_denoised_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["nonaggr_denoised_file"] - - -def out_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_dir"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L151 of /interfaces/fsl/aroma.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_dir"] = os.path.abspath(inputs.out_dir) - out_dir = outputs["out_dir"] - - if inputs.denoise_type in ("aggr", "both"): - outputs["aggr_denoised_file"] = os.path.join( - out_dir, "denoised_func_data_aggr.nii.gz" - ) - if inputs.denoise_type in ("nonaggr", "both"): - outputs["nonaggr_denoised_file"] = os.path.join( - out_dir, "denoised_func_data_nonaggr.nii.gz" - ) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/image_maths.yaml b/nipype-auto-conv/specs/interfaces/image_maths.yaml index c0cb2c5..90dbab0 100644 --- a/nipype-auto-conv/specs/interfaces/image_maths.yaml +++ 
b/nipype-auto-conv/specs/interfaces/image_maths.yaml @@ -6,18 +6,18 @@ # Docs # ---- # Use FSL fslmaths command to allow mathematical manipulation of images -# `FSL info `_ +# `FSL info `_ # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import anatfile -# >>> maths = fsl.ImageMaths(in_file=anatfile, op_string= '-add 5', -# ... out_file='foo_maths.nii') -# >>> maths.cmdline == 'fslmaths %s -add 5 foo_maths.nii' % anatfile -# True +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import anatfile +# >>> maths = fsl.ImageMaths(in_file=anatfile, op_string= '-add 5', +# ... out_file='foo_maths.nii') +# >>> maths.cmdline == 'fslmaths %s -add 5 foo_maths.nii' % anatfile +# True # # # @@ -41,9 +41,6 @@ inputs: # type=file|default=: mask_file: generic/file # type=file|default=: use (following image>0) to mask current image - out_file: Path - # type=file: - # type=file|default=: callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -67,7 +64,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"foo_maths.nii"' # type=file: # type=file|default=: @@ -93,13 +90,13 @@ tests: out_data_type: # type=enum|default='char'|allowed['char','double','float','input','int','short']: output datatype, one of (char, short, int, float, double, input) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': 
Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -124,7 +121,7 @@ tests: # type=file: # type=file|default=: imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: anatfile @@ -156,7 +153,7 @@ doctests: # type=file: # type=file|default=: imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/image_maths_callables.py b/nipype-auto-conv/specs/interfaces/image_maths_callables.py deleted file mode 100644 index 0689a1c..0000000 --- a/nipype-auto-conv/specs/interfaces/image_maths_callables.py +++ /dev/null @@ -1,332 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ImageMaths.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L627 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L635 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - suffix = "_maths" # ohinds: build suffix - if inputs.suffix is not attrs.NOTHING: - suffix = inputs.suffix - outputs = {} - outputs["out_file"] = inputs.out_file - if outputs["out_file"] is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/image_meants.yaml b/nipype-auto-conv/specs/interfaces/image_meants.yaml index c677a44..7a49179 100644 --- a/nipype-auto-conv/specs/interfaces/image_meants.yaml +++ b/nipype-auto-conv/specs/interfaces/image_meants.yaml @@ -6,8 +6,8 @@ # Docs # ---- # Use fslmeants for printing the average timeseries (intensities) to -# the screen (or saves to a file). The average is taken over all voxels -# in the mask (or all voxels in the image if no mask is specified) +# the screen (or saves to a file). 
The average is taken over all voxels +# in the mask (or all voxels in the image if no mask is specified) # # task_name: ImageMeants @@ -28,9 +28,6 @@ inputs: # type=file|default=: input file for computing the average timeseries mask: generic/file # type=file|default=: input 3D mask - out_file: Path - # type=file: path/name of output text matrix - # type=file|default=: name of output text matrix callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -54,7 +51,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: path/name of output text matrix # type=file|default=: name of output text matrix @@ -86,13 +83,13 @@ tests: transpose: # type=bool|default=False: output results in transpose format (one row per voxel/mean) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/image_meants_callables.py 
b/nipype-auto-conv/specs/interfaces/image_meants_callables.py deleted file mode 100644 index b7eb8a5..0000000 --- a/nipype-auto-conv/specs/interfaces/image_meants_callables.py +++ /dev/null @@ -1,331 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ImageMeants.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L184 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmeants" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L174 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if outputs["out_file"] is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix="_ts", - ext=".txt", - change_ext=True, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/image_stats.yaml b/nipype-auto-conv/specs/interfaces/image_stats.yaml index b390726..c8a784b 100644 --- a/nipype-auto-conv/specs/interfaces/image_stats.yaml +++ b/nipype-auto-conv/specs/interfaces/image_stats.yaml @@ -6,18 +6,18 @@ # Docs # ---- # Use FSL fslstats command to calculate stats from images -# `FSL info -# `_ +# `FSL info +# `_ # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import ImageStats -# >>> from nipype.testing import funcfile -# >>> stats = ImageStats(in_file=funcfile, op_string= '-M') -# >>> stats.cmdline == 'fslstats %s -M'%funcfile -# True +# >>> from nipype.interfaces.fsl import ImageStats +# >>> from nipype.testing import funcfile +# >>> stats = ImageStats(in_file=funcfile, op_string= '-M') +# >>> stats.cmdline == 'fslstats %s -M'%funcfile +# True # # # @@ -63,7 +63,7 @@ outputs: out_stat: out_stat_callable # type=any: stats output templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields 
requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -81,13 +81,13 @@ tests: index_mask_file: # type=file|default=: generate separate n submasks from indexMask, for indexvalues 1..n where n is the maximum index value in indexMask, and generate statistics for each submask output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -109,7 +109,7 @@ tests: op_string: '"-M"' # type=str|default='': string defining the operation, options are applied in order, e.g. -M -l 10 -M will report the non-zero mean, apply a threshold and then report the new nonzero mean imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: nipype.testing name: funcfile @@ -138,7 +138,7 @@ doctests: op_string: '"-M"' # type=str|default='': string defining the operation, options are applied in order, e.g. 
-M -l 10 -M will report the non-zero mean, apply a threshold and then report the new nonzero mean imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/image_stats_callables.py b/nipype-auto-conv/specs/interfaces/image_stats_callables.py deleted file mode 100644 index 691ce5b..0000000 --- a/nipype-auto-conv/specs/interfaces/image_stats_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ImageStats.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_stat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_stat"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in 
trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = 
dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - 
command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/inv_warp.yaml b/nipype-auto-conv/specs/interfaces/inv_warp.yaml index a34cde0..a72930c 100644 --- a/nipype-auto-conv/specs/interfaces/inv_warp.yaml +++ b/nipype-auto-conv/specs/interfaces/inv_warp.yaml @@ -6,20 +6,20 @@ # Docs # ---- # -# Use FSL Invwarp to invert a FNIRT warp +# Use FSL Invwarp to invert a FNIRT warp # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import InvWarp -# >>> invwarp = InvWarp() -# >>> invwarp.inputs.warp = "struct2mni.nii" -# >>> invwarp.inputs.reference = "anatomical.nii" -# >>> invwarp.inputs.output_type = "NIFTI_GZ" -# >>> invwarp.cmdline -# 'invwarp --out=struct2mni_inverse.nii.gz --ref=anatomical.nii --warp=struct2mni.nii' -# >>> res = invwarp.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import InvWarp +# >>> invwarp = InvWarp() +# >>> 
invwarp.inputs.warp = "struct2mni.nii" +# >>> invwarp.inputs.reference = "anatomical.nii" +# >>> invwarp.inputs.output_type = "NIFTI_GZ" +# >>> invwarp.cmdline +# 'invwarp --out=struct2mni_inverse.nii.gz --ref=anatomical.nii --warp=struct2mni.nii' +# >>> res = invwarp.run() # doctest: +SKIP # # # @@ -37,10 +37,7 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - inverse_warp: Path - # type=file: Name of output file, containing warps that are the "reverse" of those in --warp. - # type=file|default=: Name of output file, containing warps that are the "reverse" of those in --warp. This will be a field-file (rather than a file of spline coefficients), and it will have any affine component included as part of the displacements. - reference: medimage/nifti1 + reference: generic/file # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. warp: medimage/nifti1 # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). 
@@ -67,7 +64,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -96,13 +93,13 @@ tests: jacobian_max: # type=float|default=0.0: Maximum acceptable Jacobian value for constraint (default 100.0) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -121,12 +118,10 @@ tests: # (if not specified, will try to choose a sensible value) warp: # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). - reference: - # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. 
output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -149,12 +144,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. warp: '"struct2mni.nii"' # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). - reference: '"anatomical.nii"' - # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/inv_warp_callables.py b/nipype-auto-conv/specs/interfaces/inv_warp_callables.py deleted file mode 100644 index f9af045..0000000 --- a/nipype-auto-conv/specs/interfaces/inv_warp_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of InvWarp.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def inverse_warp_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["inverse_warp"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an 
input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/isotropic_smooth.yaml b/nipype-auto-conv/specs/interfaces/isotropic_smooth.yaml index 51d44a9..623ef7e 100644 --- a/nipype-auto-conv/specs/interfaces/isotropic_smooth.yaml +++ b/nipype-auto-conv/specs/interfaces/isotropic_smooth.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -74,13 +71,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/isotropic_smooth_callables.py b/nipype-auto-conv/specs/interfaces/isotropic_smooth_callables.py deleted file mode 100644 index 214d374..0000000 --- a/nipype-auto-conv/specs/interfaces/isotropic_smooth_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to 
in the "callables" section of IsotropicSmooth.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/l2_model.yaml b/nipype-auto-conv/specs/interfaces/l2_model.yaml index fdfd1e2..ace294f 100644 --- a/nipype-auto-conv/specs/interfaces/l2_model.yaml +++ b/nipype-auto-conv/specs/interfaces/l2_model.yaml @@ -7,11 +7,11 @@ # ---- # Generate subject specific second level model # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import L2Model -# >>> model = L2Model(num_copes=3) # 3 sessions +# >>> from nipype.interfaces.fsl import L2Model +# >>> model = L2Model(num_copes=3) # 3 sessions # # task_name: L2Model @@ -54,7 +54,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -64,7 +64,7 @@ tests: num_copes: # type=range|default=1: 
number of copes to be combined imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/l2_model_callables.py b/nipype-auto-conv/specs/interfaces/l2_model_callables.py deleted file mode 100644 index faac4e1..0000000 --- a/nipype-auto-conv/specs/interfaces/l2_model_callables.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of L2Model.yaml""" - -import os - - -def design_con_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_con"] - - -def design_grp_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_grp"] - - -def design_mat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_mat"] - - -# Original source at L1431 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - for field in list(outputs.keys()): - outputs[field] = os.path.join(output_dir, field.replace("_", ".")) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/level_1_design.yaml b/nipype-auto-conv/specs/interfaces/level_1_design.yaml index 4c3d611..570af2b 100644 --- a/nipype-auto-conv/specs/interfaces/level_1_design.yaml +++ b/nipype-auto-conv/specs/interfaces/level_1_design.yaml @@ -7,14 +7,14 @@ # ---- # 
Generate FEAT specific files # -# Examples -# -------- +# Examples +# -------- # -# >>> level1design = Level1Design() -# >>> level1design.inputs.interscan_interval = 2.5 -# >>> level1design.inputs.bases = {'dgamma':{'derivs': False}} -# >>> level1design.inputs.session_info = 'session_info.npz' -# >>> level1design.run() # doctest: +SKIP +# >>> level1design = Level1Design() +# >>> level1design.inputs.interscan_interval = 2.5 +# >>> level1design.inputs.bases = {'dgamma':{'derivs': False}} +# >>> level1design.inputs.session_info = 'session_info.npz' +# >>> level1design.run() # doctest: +SKIP # # task_name: Level1Design @@ -53,7 +53,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -73,7 +73,7 @@ tests: contrasts: # type=list|default=[]: List of contrasts with each contrast being a list of the form - [('name', 'stat', [condition list], [weight list], [session list])]. if session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts. 
imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/level_1_design_callables.py b/nipype-auto-conv/specs/interfaces/level_1_design_callables.py deleted file mode 100644 index ed9f720..0000000 --- a/nipype-auto-conv/specs/interfaces/level_1_design_callables.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Level1Design.yaml""" - -import os - - -def ev_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["ev_files"] - - -def fsf_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fsf_files"] - - -# Original source at L343 of /interfaces/fsl/model.py -def _format_session_info( - session_info, inputs=None, stdout=None, stderr=None, output_dir=None -): - if isinstance(session_info, dict): - session_info = [session_info] - return session_info - - -# Original source at L414 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - cwd = output_dir - outputs["fsf_files"] = [] - outputs["ev_files"] = [] - basis_key = list(inputs.bases.keys())[0] - ev_parameters = dict(inputs.bases[basis_key]) - for runno, runinfo in enumerate( - _format_session_info( - inputs.session_info, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ): - outputs["fsf_files"].append(os.path.join(cwd, 
"run%d.fsf" % runno)) - outputs["ev_files"].insert(runno, []) - evname = [] - for field in ["cond", "regress"]: - for i, cond in enumerate(runinfo[field]): - name = cond["name"] - evname.append(name) - evfname = os.path.join( - cwd, "ev_%s_%d_%d.txt" % (name, runno, len(evname)) - ) - if field == "cond": - ev_parameters["temporalderiv"] = int( - bool(ev_parameters.get("derivs", False)) - ) - if ev_parameters["temporalderiv"]: - evname.append(name + "TD") - outputs["ev_files"][runno].append(os.path.join(cwd, evfname)) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/make_dyadic_vectors.yaml b/nipype-auto-conv/specs/interfaces/make_dyadic_vectors.yaml index 0ad7027..8bc94fb 100644 --- a/nipype-auto-conv/specs/interfaces/make_dyadic_vectors.yaml +++ b/nipype-auto-conv/specs/interfaces/make_dyadic_vectors.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Create vector volume representing mean principal diffusion direction -# and its uncertainty (dispersion) +# and its uncertainty (dispersion) task_name: MakeDyadicVectors nipype_name: MakeDyadicVectors nipype_module: nipype.interfaces.fsl.dti @@ -53,7 +53,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -71,13 +71,13 @@ tests: perc: # type=float|default=0.0: the {perc}% angle of the output cone of uncertainty (output will be in degrees) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters 
to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/make_dyadic_vectors_callables.py b/nipype-auto-conv/specs/interfaces/make_dyadic_vectors_callables.py deleted file mode 100644 index 8851bde..0000000 --- a/nipype-auto-conv/specs/interfaces/make_dyadic_vectors_callables.py +++ /dev/null @@ -1,332 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MakeDyadicVectors.yaml""" - -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def dispersion_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dispersion"] - - -def dyads_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dyads"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. 
- If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "make_dyadic_vectors" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1571 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["dyads"] = _gen_fname( - inputs.output, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["dispersion"] = _gen_fname( - inputs.output, - suffix="_dispersion", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/maths_command.yaml b/nipype-auto-conv/specs/interfaces/maths_command.yaml index 9b8e624..d86904e 100644 --- a/nipype-auto-conv/specs/interfaces/maths_command.yaml +++ b/nipype-auto-conv/specs/interfaces/maths_command.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -70,13 +67,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/maths_command_callables.py b/nipype-auto-conv/specs/interfaces/maths_command_callables.py deleted file mode 100644 index c11baf2..0000000 --- a/nipype-auto-conv/specs/interfaces/maths_command_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the 
"callables" section of MathsCommand.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/max_image.yaml b/nipype-auto-conv/specs/interfaces/max_image.yaml index 3e56aed..be9df28 100644 --- a/nipype-auto-conv/specs/interfaces/max_image.yaml +++ b/nipype-auto-conv/specs/interfaces/max_image.yaml @@ -7,14 +7,14 @@ # ---- # Use fslmaths to generate a max image across a given dimension. # -# Examples -# -------- -# >>> from nipype.interfaces.fsl.maths import MaxImage -# >>> maxer = MaxImage() -# >>> maxer.inputs.in_file = "functional.nii" # doctest: +SKIP -# >>> maxer.dimension = "T" -# >>> maxer.cmdline # doctest: +SKIP -# 'fslmaths functional.nii -Tmax functional_max.nii' +# Examples +# -------- +# >>> from nipype.interfaces.fsl.maths import MaxImage +# >>> maxer = MaxImage() +# >>> maxer.inputs.in_file = "functional.nii" # doctest: +SKIP +# >>> maxer.dimension = "T" +# >>> maxer.cmdline # doctest: +SKIP +# 'fslmaths functional.nii -Tmax functional_max.nii' # # task_name: MaxImage @@ -33,9 +33,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: medimage/nifti1 # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -59,7 +56,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -83,13 +80,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -109,7 +106,7 @@ tests: in_file: # type=file|default=: image to operate on imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 
'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -124,7 +121,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fslmaths functional.nii -Tmax functional_max.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -133,7 +130,7 @@ doctests: in_file: '"functional.nii" # doctest: +SKIP' # type=file|default=: image to operate on imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/max_image_callables.py b/nipype-auto-conv/specs/interfaces/max_image_callables.py deleted file mode 100644 index 61b6ae6..0000000 --- a/nipype-auto-conv/specs/interfaces/max_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MaxImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/maxn_image.yaml b/nipype-auto-conv/specs/interfaces/maxn_image.yaml index c60f028..eec7752 100644 --- a/nipype-auto-conv/specs/interfaces/maxn_image.yaml +++ b/nipype-auto-conv/specs/interfaces/maxn_image.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Use fslmaths to generate an image of index of max across -# a given dimension. +# a given dimension. # # task_name: MaxnImage @@ -25,9 +25,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -51,7 +48,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -75,13 +72,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/maxn_image_callables.py b/nipype-auto-conv/specs/interfaces/maxn_image_callables.py deleted file mode 100644 index 87f0b52..0000000 --- a/nipype-auto-conv/specs/interfaces/maxn_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the 
"callables" section of MaxnImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/mcflirt.yaml b/nipype-auto-conv/specs/interfaces/mcflirt.yaml index 5db6ff0..da00f61 100644 --- a/nipype-auto-conv/specs/interfaces/mcflirt.yaml +++ b/nipype-auto-conv/specs/interfaces/mcflirt.yaml @@ -7,19 +7,19 @@ # ---- # FSL MCFLIRT wrapper for within-modality motion correction # -# For complete details, see the `MCFLIRT Documentation. -# `_ +# For complete details, see the `MCFLIRT Documentation. 
+# `_ # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> mcflt = fsl.MCFLIRT() -# >>> mcflt.inputs.in_file = 'functional.nii' -# >>> mcflt.inputs.cost = 'mutualinfo' -# >>> mcflt.inputs.out_file = 'moco.nii' -# >>> mcflt.cmdline -# 'mcflirt -in functional.nii -cost mutualinfo -out moco.nii' -# >>> res = mcflt.run() # doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> mcflt = fsl.MCFLIRT() +# >>> mcflt.inputs.in_file = 'functional.nii' +# >>> mcflt.inputs.cost = 'mutualinfo' +# >>> mcflt.inputs.out_file = 'moco.nii' +# >>> mcflt.cmdline +# 'mcflirt -in functional.nii -cost mutualinfo -out moco.nii' +# >>> res = mcflt.run() # doctest: +SKIP # # task_name: MCFLIRT @@ -40,9 +40,6 @@ inputs: # type=file|default=: timeseries to motion-correct init: generic/file # type=file|default=: initial transformation matrix - out_file: Path - # type=file: motion-corrected timeseries - # type=file|default=: file to write ref_file: generic/file # type=file|default=: target image for motion correction callable_defaults: @@ -80,7 +77,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"moco.nii"' # type=file: motion-corrected timeseries # type=file|default=: file to write @@ -132,13 +129,13 @@ tests: ref_file: # type=file|default=: target image for motion correction output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -157,13 +154,11 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: timeseries to motion-correct - cost: '"mutualinfo"' - # type=enum|default='mutualinfo'|allowed['corratio','leastsquares','mutualinfo','normcorr','normmi','woods']: cost function to optimize out_file: '"moco.nii"' # type=file: motion-corrected timeseries # type=file|default=: file to write imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -178,7 +173,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: mcflirt -in functional.nii -cost mutualinfo -out moco.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -186,13 +181,11 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_file: '"functional.nii"' # type=file|default=: timeseries to motion-correct - cost: '"mutualinfo"' - # type=enum|default='mutualinfo'|allowed['corratio','leastsquares','mutualinfo','normcorr','normmi','woods']: cost function to optimize out_file: '"moco.nii"' # type=file: motion-corrected timeseries # type=file|default=: file to write imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/mcflirt_callables.py b/nipype-auto-conv/specs/interfaces/mcflirt_callables.py deleted file mode 100644 index d6bc0a6..0000000 --- a/nipype-auto-conv/specs/interfaces/mcflirt_callables.py +++ /dev/null @@ -1,466 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MCFLIRT.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from looseversion import LooseVersion -from nibabel import load -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def mat_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mat_file"] - - -def mean_img_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_img"] - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def par_file_callable(output_dir, 
inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["par_file"] - - -def rms_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["rms_files"] - - -def std_img_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["std_img"] - - -def variance_img_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["variance_img"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L962 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. 
" % "mcflirt" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L967 of /interfaces/fsl/preprocess.py -def _gen_outfilename(inputs=None, stdout=None, stderr=None, output_dir=None): - out_file = inputs.out_file - if out_file is not attrs.NOTHING: - out_file = os.path.realpath(out_file) - if (out_file is attrs.NOTHING) and (inputs.in_file is not attrs.NOTHING): - out_file = _gen_fname( - inputs.in_file, - suffix="_mcf", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return os.path.abspath(out_file) - - -# Original source at L906 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - - outputs["out_file"] = _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - output_dir = os.path.dirname(outputs["out_file"]) - - if (inputs.stats_imgs is not attrs.NOTHING) and inputs.stats_imgs: - if LooseVersion(Info.version()) < LooseVersion("6.0.0"): - # FSL <6.0 outputs have .nii.gz_variance.nii.gz as extension - outputs["variance_img"] = _gen_fname( - outputs["out_file"] + "_variance.ext", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["std_img"] = _gen_fname( - outputs["out_file"] + "_sigma.ext", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs["variance_img"] = _gen_fname( - outputs["out_file"], - suffix="_variance", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["std_img"] = _gen_fname( - 
outputs["out_file"], - suffix="_sigma", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - # The mean image created if -stats option is specified ('meanvol') - # is missing the top and bottom slices. Therefore we only expose the - # mean image created by -meanvol option ('mean_reg') which isn't - # corrupted. - # Note that the same problem holds for the std and variance image. - - if (inputs.mean_vol is not attrs.NOTHING) and inputs.mean_vol: - if LooseVersion(Info.version()) < LooseVersion("6.0.0"): - # FSL <6.0 outputs have .nii.gz_mean_img.nii.gz as extension - outputs["mean_img"] = _gen_fname( - outputs["out_file"] + "_mean_reg.ext", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs["mean_img"] = _gen_fname( - outputs["out_file"], - suffix="_mean_reg", - cwd=output_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - if (inputs.save_mats is not attrs.NOTHING) and inputs.save_mats: - _, filename = os.path.split(outputs["out_file"]) - matpathname = os.path.join(output_dir, filename + ".mat") - _, _, _, timepoints = load(inputs.in_file).shape - outputs["mat_file"] = [] - for t in range(timepoints): - outputs["mat_file"].append(os.path.join(matpathname, "MAT_%04d" % t)) - if (inputs.save_plots is not attrs.NOTHING) and inputs.save_plots: - # Note - if e.g. out_file has .nii.gz, you get .nii.gz.par, - # which is what mcflirt does! 
- outputs["par_file"] = outputs["out_file"] + ".par" - if (inputs.save_rms is not attrs.NOTHING) and inputs.save_rms: - outfile = outputs["out_file"] - outputs["rms_files"] = [outfile + "_abs.rms", outfile + "_rel.rms"] - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/mean_image.yaml b/nipype-auto-conv/specs/interfaces/mean_image.yaml index 78394bf..76c9ef9 100644 --- a/nipype-auto-conv/specs/interfaces/mean_image.yaml +++ b/nipype-auto-conv/specs/interfaces/mean_image.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -72,13 +69,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/mean_image_callables.py b/nipype-auto-conv/specs/interfaces/mean_image_callables.py deleted file mode 100644 index 5943cf3..0000000 --- a/nipype-auto-conv/specs/interfaces/mean_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MeanImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. 
- - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/median_image.yaml b/nipype-auto-conv/specs/interfaces/median_image.yaml index e297a4a..e03ef4b 100644 --- a/nipype-auto-conv/specs/interfaces/median_image.yaml +++ b/nipype-auto-conv/specs/interfaces/median_image.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -72,13 +69,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/median_image_callables.py b/nipype-auto-conv/specs/interfaces/median_image_callables.py deleted file mode 100644 index ac60356..0000000 --- a/nipype-auto-conv/specs/interfaces/median_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the 
"callables" section of MedianImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/melodic.yaml b/nipype-auto-conv/specs/interfaces/melodic.yaml index e6c7c27..cb56182 100644 --- a/nipype-auto-conv/specs/interfaces/melodic.yaml +++ b/nipype-auto-conv/specs/interfaces/melodic.yaml @@ -6,27 +6,27 @@ # Docs # ---- # Multivariate Exploratory Linear Optimised Decomposition into Independent -# Components +# Components # -# Examples -# -------- +# Examples +# -------- # -# >>> melodic_setup = MELODIC() -# >>> melodic_setup.inputs.approach = 'tica' -# >>> melodic_setup.inputs.in_files = ['functional.nii', 'functional2.nii', 'functional3.nii'] -# >>> melodic_setup.inputs.no_bet = True -# >>> melodic_setup.inputs.bg_threshold = 10 -# >>> melodic_setup.inputs.tr_sec = 1.5 -# >>> melodic_setup.inputs.mm_thresh = 0.5 -# >>> melodic_setup.inputs.out_stats = True -# >>> melodic_setup.inputs.t_des = 'timeDesign.mat' -# >>> melodic_setup.inputs.t_con = 'timeDesign.con' -# >>> melodic_setup.inputs.s_des = 'subjectDesign.mat' -# >>> melodic_setup.inputs.s_con = 'subjectDesign.con' -# >>> melodic_setup.inputs.out_dir = 'groupICA.out' -# >>> melodic_setup.cmdline -# 
'melodic -i functional.nii,functional2.nii,functional3.nii -a tica --bgthreshold=10.000000 --mmthresh=0.500000 --nobet -o groupICA.out --Ostats --Scon=subjectDesign.con --Sdes=subjectDesign.mat --Tcon=timeDesign.con --Tdes=timeDesign.mat --tr=1.500000' -# >>> melodic_setup.run() # doctest: +SKIP +# >>> melodic_setup = MELODIC() +# >>> melodic_setup.inputs.approach = 'tica' +# >>> melodic_setup.inputs.in_files = ['functional.nii', 'functional2.nii', 'functional3.nii'] +# >>> melodic_setup.inputs.no_bet = True +# >>> melodic_setup.inputs.bg_threshold = 10 +# >>> melodic_setup.inputs.tr_sec = 1.5 +# >>> melodic_setup.inputs.mm_thresh = 0.5 +# >>> melodic_setup.inputs.out_stats = True +# >>> melodic_setup.inputs.t_des = 'timeDesign.mat' +# >>> melodic_setup.inputs.t_con = 'timeDesign.con' +# >>> melodic_setup.inputs.s_des = 'subjectDesign.mat' +# >>> melodic_setup.inputs.s_con = 'subjectDesign.con' +# >>> melodic_setup.inputs.out_dir = 'groupICA.out' +# >>> melodic_setup.cmdline +# 'melodic -i functional.nii,functional2.nii,functional3.nii -a tica --bgthreshold=10.000000 --mmthresh=0.500000 --nobet -o groupICA.out --Ostats --Scon=subjectDesign.con --Sdes=subjectDesign.mat --Tcon=timeDesign.con --Tdes=timeDesign.mat --tr=1.500000' +# >>> melodic_setup.run() # doctest: +SKIP # # # @@ -48,24 +48,21 @@ inputs: # type=file|default=: filename of the IC components file for mixture modelling bg_image: generic/file # type=file|default=: specify background image for report (default: mean image) - in_files: medimage/nifti1+list-of + in_files: generic/file+list-of # type=inputmultiobject|default=[]: input file names (either single file name or a list) mask: generic/file # type=file|default=: file name of mask for thresholding mix: generic/file # type=file|default=: mixing matrix for mixture modelling / filtering - out_dir: Path - # type=directory: - # type=directory|default=: output directory name - s_con: medimage-fsl/con + s_con: fileformats.medimage_fsl.Con # 
type=file|default=: t-contrast matrix across subject-domain - s_des: datascience/text-matrix + s_des: generic/file # type=file|default=: design matrix across subject-domain smode: generic/file # type=file|default=: matrix of session modes for report generation - t_con: medimage-fsl/con + t_con: fileformats.medimage_fsl.Con # type=file|default=: t-contrast matrix across time-domain - t_des: datascience/text-matrix + t_des: generic/file # type=file|default=: design matrix across time-domain callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -83,12 +80,17 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. + out_dir: generic/directory + # type=directory: + # type=directory|default=: output directory name + report_dir: generic/directory + # type=directory: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields - out_dir: '"groupICA.out"' + # dict[str, str] - `path_template` values to be provided to output fields + out_dir: out_dir # type=directory: # type=directory|default=: output directory name requirements: @@ -195,13 +197,13 @@ tests: remove_deriv: # type=bool|default=False: removes every second entry in paradigm file (EV derivatives) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -220,31 +222,18 @@ tests: # (if not specified, will try to choose a sensible value) approach: '"tica"' # type=str|default='': approach for decomposition, 2D: defl, symm (default), 3D: tica (default), concat - in_files: - # type=inputmultiobject|default=[]: input file names (either single file name or a list) no_bet: 'True' # type=bool|default=False: switch off BET - bg_threshold: '10' - # type=float|default=0.0: brain/non-brain threshold used to mask non-brain voxels, as a percentage (only if --nobet selected) tr_sec: '1.5' # type=float|default=0.0: TR in seconds - mm_thresh: '0.5' - # type=float|default=0.0: threshold for Mixture Model based inference out_stats: 'True' # type=bool|default=False: output thresholded maps and probability maps - t_des: - # type=file|default=: design matrix across time-domain t_con: # type=file|default=: t-contrast matrix across time-domain - s_des: - # type=file|default=: design matrix across subject-domain s_con: # type=file|default=: t-contrast matrix across subject-domain - out_dir: '"groupICA.out"' - # type=directory: - # type=directory|default=: output directory name imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -267,31 +256,18 @@ doctests: # '.mock()' method 
of the corresponding class is used instead. approach: '"tica"' # type=str|default='': approach for decomposition, 2D: defl, symm (default), 3D: tica (default), concat - in_files: '["functional.nii", "functional2.nii", "functional3.nii"]' - # type=inputmultiobject|default=[]: input file names (either single file name or a list) no_bet: 'True' # type=bool|default=False: switch off BET - bg_threshold: '10' - # type=float|default=0.0: brain/non-brain threshold used to mask non-brain voxels, as a percentage (only if --nobet selected) tr_sec: '1.5' # type=float|default=0.0: TR in seconds - mm_thresh: '0.5' - # type=float|default=0.0: threshold for Mixture Model based inference out_stats: 'True' # type=bool|default=False: output thresholded maps and probability maps - t_des: '"timeDesign.mat"' - # type=file|default=: design matrix across time-domain t_con: '"timeDesign.con"' # type=file|default=: t-contrast matrix across time-domain - s_des: '"subjectDesign.mat"' - # type=file|default=: design matrix across subject-domain s_con: '"subjectDesign.con"' # type=file|default=: t-contrast matrix across subject-domain - out_dir: '"groupICA.out"' - # type=directory: - # type=directory|default=: output directory name imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/melodic_callables.py b/nipype-auto-conv/specs/interfaces/melodic_callables.py deleted file mode 100644 index 9ab73a8..0000000 --- a/nipype-auto-conv/specs/interfaces/melodic_callables.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MELODIC.yaml""" - -import attrs -import os - - -def out_dir_default(inputs): - return _gen_filename("out_dir", inputs=inputs) - - -def out_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_dir"] - - -def report_dir_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["report_dir"] - - -# Original source at L1858 of /interfaces/fsl/model.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_dir": - return output_dir - - -# Original source at L1848 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.out_dir is not attrs.NOTHING: - outputs["out_dir"] = os.path.abspath(inputs.out_dir) - else: - outputs["out_dir"] = _gen_filename( - "out_dir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if (inputs.report is not attrs.NOTHING) and inputs.report: - outputs["report_dir"] = os.path.join(outputs["out_dir"], "report") - return outputs diff --git a/nipype-auto-conv/specs/interfaces/merge.yaml b/nipype-auto-conv/specs/interfaces/merge.yaml index 6dd97e4..4480f9c 100644 --- a/nipype-auto-conv/specs/interfaces/merge.yaml +++ b/nipype-auto-conv/specs/interfaces/merge.yaml @@ -7,26 +7,26 @@ # ---- # Use fslmerge to concatenate images # -# Images can be concatenated across time, x, y, or z dimensions. 
Across the -# time (t) dimension the TR is set by default to 1 sec. +# Images can be concatenated across time, x, y, or z dimensions. Across the +# time (t) dimension the TR is set by default to 1 sec. # -# Note: to set the TR to a different value, specify 't' for dimension and -# specify the TR value in seconds for the tr input. The dimension will be -# automatically updated to 'tr'. +# Note: to set the TR to a different value, specify 't' for dimension and +# specify the TR value in seconds for the tr input. The dimension will be +# automatically updated to 'tr'. # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import Merge -# >>> merger = Merge() -# >>> merger.inputs.in_files = ['functional2.nii', 'functional3.nii'] -# >>> merger.inputs.dimension = 't' -# >>> merger.inputs.output_type = 'NIFTI_GZ' -# >>> merger.cmdline -# 'fslmerge -t functional2_merged.nii.gz functional2.nii functional3.nii' -# >>> merger.inputs.tr = 2.25 -# >>> merger.cmdline -# 'fslmerge -tr functional2_merged.nii.gz functional2.nii functional3.nii 2.25' +# >>> from nipype.interfaces.fsl import Merge +# >>> merger = Merge() +# >>> merger.inputs.in_files = ['functional2.nii', 'functional3.nii'] +# >>> merger.inputs.dimension = 't' +# >>> merger.inputs.output_type = 'NIFTI_GZ' +# >>> merger.cmdline +# 'fslmerge -t functional2_merged.nii.gz functional2.nii functional3.nii' +# >>> merger.inputs.tr = 2.25 +# >>> merger.cmdline +# 'fslmerge -tr functional2_merged.nii.gz functional2.nii functional3.nii 2.25' # # # @@ -46,9 +46,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_files: medimage/nifti1+list-of # type=list|default=[]: - merged_file: Path - # type=file: - # type=file|default=: callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -72,7 +69,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -89,13 +86,13 @@ tests: # type=file: # type=file|default=: output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -114,14 +111,12 @@ tests: # (if not specified, will try to choose a sensible value) in_files: # type=list|default=[]: - dimension: '"t"' - # type=enum|default='t'|allowed['a','t','x','y','z']: dimension along which to merge, optionally set tr input when dimension is t output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # 
type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type tr: '2.25' # type=float|default=0.0: use to specify TR in seconds (default is 1.00 sec), overrides dimension and sets it to tr imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -144,14 +139,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_files: '["functional2.nii", "functional3.nii"]' # type=list|default=[]: - dimension: '"t"' - # type=enum|default='t'|allowed['a','t','x','y','z']: dimension along which to merge, optionally set tr input when dimension is t output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type tr: '2.25' # type=float|default=0.0: use to specify TR in seconds (default is 1.00 sec), overrides dimension and sets it to tr imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/merge_callables.py b/nipype-auto-conv/specs/interfaces/merge_callables.py deleted file mode 100644 index efab957..0000000 --- a/nipype-auto-conv/specs/interfaces/merge_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Merge.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def merged_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["merged_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - 
"name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/min_image.yaml b/nipype-auto-conv/specs/interfaces/min_image.yaml index 3b50baa..2345c50 100644 --- a/nipype-auto-conv/specs/interfaces/min_image.yaml +++ b/nipype-auto-conv/specs/interfaces/min_image.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -72,13 +69,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # 
type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/min_image_callables.py b/nipype-auto-conv/specs/interfaces/min_image_callables.py deleted file mode 100644 index 9cde1b9..0000000 --- a/nipype-auto-conv/specs/interfaces/min_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MinImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based 
on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), 
appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = 
CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/motion_outliers.yaml b/nipype-auto-conv/specs/interfaces/motion_outliers.yaml index 0c8650c..d2d83b0 100644 --- a/nipype-auto-conv/specs/interfaces/motion_outliers.yaml +++ b/nipype-auto-conv/specs/interfaces/motion_outliers.yaml @@ -6,15 +6,15 @@ # Docs # ---- # -# Use FSL fsl_motion_outliers`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLMotionOutliers`_ to find outliers in timeseries (4d) data. -# Examples -# -------- -# >>> from nipype.interfaces.fsl import MotionOutliers -# >>> mo = MotionOutliers() -# >>> mo.inputs.in_file = "epi.nii" -# >>> mo.cmdline # doctest: +ELLIPSIS -# 'fsl_motion_outliers -i epi.nii -o epi_outliers.txt -p epi_metrics.png -s epi_metrics.txt' -# >>> res = mo.run() # doctest: +SKIP +# Use FSL fsl_motion_outliers`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLMotionOutliers`_ to find outliers in timeseries (4d) data. 
+# Examples +# -------- +# >>> from nipype.interfaces.fsl import MotionOutliers +# >>> mo = MotionOutliers() +# >>> mo.inputs.in_file = "epi.nii" +# >>> mo.cmdline # doctest: +ELLIPSIS +# 'fsl_motion_outliers -i epi.nii -o epi_outliers.txt -p epi_metrics.png -s epi_metrics.txt' +# >>> res = mo.run() # doctest: +SKIP # task_name: MotionOutliers nipype_name: MotionOutliers @@ -34,15 +34,6 @@ inputs: # type=file|default=: unfiltered 4D image mask: generic/file # type=file|default=: mask image for calculating metric - out_file: Path - # type=file: - # type=file|default=: output outlier file name - out_metric_plot: Path - # type=file: - # type=file|default=: output metric values plot (DVARS etc.) file name - out_metric_values: Path - # type=file: - # type=file|default=: output metric values (DVARS etc.) file name callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -72,7 +63,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -101,13 +92,13 @@ tests: # type=file: # type=file|default=: output metric values plot (DVARS etc.) 
file name output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -127,7 +118,7 @@ tests: in_file: # type=file|default=: unfiltered 4D image imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -142,7 +133,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fsl_motion_outliers -i epi.nii -o epi_outliers.txt -p epi_metrics.png -s epi_metrics.txt +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. 
@@ -151,7 +142,7 @@ doctests: in_file: '"epi.nii"' # type=file|default=: unfiltered 4D image imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/motion_outliers_callables.py b/nipype-auto-conv/specs/interfaces/motion_outliers_callables.py deleted file mode 100644 index 09bc820..0000000 --- a/nipype-auto-conv/specs/interfaces/motion_outliers_callables.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MotionOutliers.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def out_metric_plot_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_metric_plot"] - - -def out_metric_values_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_metric_values"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if 
(retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, 
- stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/multi_image_maths.yaml b/nipype-auto-conv/specs/interfaces/multi_image_maths.yaml index 17e28ce..40ea7f5 100644 --- a/nipype-auto-conv/specs/interfaces/multi_image_maths.yaml +++ b/nipype-auto-conv/specs/interfaces/multi_image_maths.yaml @@ -7,16 +7,16 @@ # ---- # Use fslmaths to perform a sequence of mathematical operations. 
# -# Examples -# -------- -# >>> from nipype.interfaces.fsl import MultiImageMaths -# >>> maths = MultiImageMaths() -# >>> maths.inputs.in_file = "functional.nii" -# >>> maths.inputs.op_string = "-add %s -mul -1 -div %s" -# >>> maths.inputs.operand_files = ["functional2.nii", "functional3.nii"] -# >>> maths.inputs.out_file = "functional4.nii" -# >>> maths.cmdline -# 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' +# Examples +# -------- +# >>> from nipype.interfaces.fsl import MultiImageMaths +# >>> maths = MultiImageMaths() +# >>> maths.inputs.in_file = "functional.nii" +# >>> maths.inputs.op_string = "-add %s -mul -1 -div %s" +# >>> maths.inputs.operand_files = ["functional2.nii", "functional3.nii"] +# >>> maths.inputs.out_file = "functional4.nii" +# >>> maths.cmdline +# 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' # # task_name: MultiImageMaths @@ -37,9 +37,6 @@ inputs: # type=file|default=: image to operate on operand_files: medimage/nifti1+list-of # type=inputmultiobject|default=[]: list of file names to plug into op string - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -56,15 +53,15 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- out_file: medimage/nifti1 + out_file: generic/file # type=file: image written after calculations # type=file|default=: image to write callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields - out_file: '"functional4.nii"' + # dict[str, str] - `path_template` values to be provided to output fields + out_file: out_file # type=file: image written after calculations # type=file|default=: image to write requirements: @@ -89,13 +86,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -114,15 +111,10 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: image to operate on - op_string: '"-add %s -mul -1 -div %s"' - # type=string|default='': python formatted string of operations to perform operand_files: # type=inputmultiobject|default=[]: list of file names to plug into op string - out_file: '"functional4.nii"' - # type=file: image written after calculations - # type=file|default=: image to write imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -137,7 +129,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -145,15 +137,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"functional.nii"' # type=file|default=: image to operate on - op_string: '"-add %s -mul -1 -div %s"' - # type=string|default='': python formatted string of operations to perform operand_files: '["functional2.nii", "functional3.nii"]' # type=inputmultiobject|default=[]: list of file names to plug into op string - out_file: '"functional4.nii"' - # type=file: image written after calculations - # type=file|default=: image to write imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/multi_image_maths_callables.py b/nipype-auto-conv/specs/interfaces/multi_image_maths_callables.py deleted file mode 100644 index 9b5eedb..0000000 --- a/nipype-auto-conv/specs/interfaces/multi_image_maths_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of MultiImageMaths.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/multiple_regress_design.yaml b/nipype-auto-conv/specs/interfaces/multiple_regress_design.yaml index 38e5a82..0713753 100644 --- a/nipype-auto-conv/specs/interfaces/multiple_regress_design.yaml +++ b/nipype-auto-conv/specs/interfaces/multiple_regress_design.yaml @@ -7,21 +7,21 @@ # ---- # Generate multiple regression design # -# .. note:: -# FSL does not demean columns for higher level analysis. +# .. note:: +# FSL does not demean columns for higher level analysis. # -# Please see `FSL documentation -# `_ -# for more details on model specification for higher level analysis. +# Please see `FSL documentation +# `_ +# for more details on model specification for higher level analysis. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import MultipleRegressDesign -# >>> model = MultipleRegressDesign() -# >>> model.inputs.contrasts = [['group mean', 'T',['reg1'],[1]]] -# >>> model.inputs.regressors = dict(reg1=[1, 1, 1], reg2=[2.,-4, 3]) -# >>> model.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import MultipleRegressDesign +# >>> model = MultipleRegressDesign() +# >>> model.inputs.contrasts = [['group mean', 'T',['reg1'],[1]]] +# >>> model.inputs.regressors = dict(reg1=[1, 1, 1], reg2=[2.,-4, 3]) +# >>> model.run() # doctest: +SKIP # # task_name: MultipleRegressDesign @@ -66,7 +66,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -80,7 +80,7 @@ tests: groups: # type=list|default=[]: list of group identifiers (defaults to single group) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/multiple_regress_design_callables.py b/nipype-auto-conv/specs/interfaces/multiple_regress_design_callables.py deleted file mode 100644 index a295709..0000000 --- a/nipype-auto-conv/specs/interfaces/multiple_regress_design_callables.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Module to put any functions that are 
referred to in the "callables" section of MultipleRegressDesign.yaml""" - -import os - - -def design_con_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_con"] - - -def design_fts_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_fts"] - - -def design_grp_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_grp"] - - -def design_mat_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["design_mat"] - - -# Original source at L1600 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - nfcons = sum([1 for con in inputs.contrasts if con[1] == "F"]) - for field in list(outputs.keys()): - if ("fts" in field) and (nfcons == 0): - continue - outputs[field] = os.path.join(output_dir, field.replace("_", ".")) - return outputs diff --git a/nipype-auto-conv/specs/interfaces/overlay.yaml b/nipype-auto-conv/specs/interfaces/overlay.yaml index 6ffac87..8ea477d 100644 --- a/nipype-auto-conv/specs/interfaces/overlay.yaml +++ b/nipype-auto-conv/specs/interfaces/overlay.yaml @@ -6,20 +6,20 @@ # Docs # ---- # Use FSL's overlay command to combine background and statistical images -# into one volume +# into one volume # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> combine = fsl.Overlay() -# >>> combine.inputs.background_image = 'mean_func.nii.gz' -# >>> combine.inputs.auto_thresh_bg = True -# >>> combine.inputs.stat_image = 'zstat1.nii.gz' -# >>> combine.inputs.stat_thresh = (3.5, 10) -# >>> 
combine.inputs.show_negative_stats = True -# >>> res = combine.run() #doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> combine = fsl.Overlay() +# >>> combine.inputs.background_image = 'mean_func.nii.gz' +# >>> combine.inputs.auto_thresh_bg = True +# >>> combine.inputs.stat_image = 'zstat1.nii.gz' +# >>> combine.inputs.stat_thresh = (3.5, 10) +# >>> combine.inputs.show_negative_stats = True +# >>> res = combine.run() #doctest: +SKIP # # # @@ -39,9 +39,6 @@ inputs: # passed to the field in the automatically generated unittests. background_image: generic/file # type=file|default=: image to use as background - out_file: Path - # type=file: combined image volume - # type=file|default=: combined image volume stat_image: generic/file # type=file|default=: statistical image to overlay in color stat_image2: generic/file @@ -69,7 +66,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: combined image volume # type=file|default=: combined image volume @@ -107,13 +104,13 @@ tests: # type=file: combined image volume # type=file|default=: combined image volume output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting 
of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/overlay_callables.py b/nipype-auto-conv/specs/interfaces/overlay_callables.py deleted file mode 100644 index a56187c..0000000 --- a/nipype-auto-conv/specs/interfaces/overlay_callables.py +++ /dev/null @@ -1,339 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Overlay.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1098 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. 
- (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "overlay" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1080 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - if (inputs.stat_image2 is not attrs.NOTHING) and ( - (inputs.show_negative_stats is attrs.NOTHING) - or not inputs.show_negative_stats - ): - stem = "%s_and_%s" % ( - split_filename(inputs.stat_image)[1], - split_filename(inputs.stat_image2)[1], - ) - else: - stem = split_filename(inputs.stat_image)[1] - out_file = _gen_fname( - stem, - suffix="_overlay", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/percentile_image.yaml b/nipype-auto-conv/specs/interfaces/percentile_image.yaml index d57bf3b..a2a67e0 100644 --- a/nipype-auto-conv/specs/interfaces/percentile_image.yaml +++ b/nipype-auto-conv/specs/interfaces/percentile_image.yaml @@ -7,15 +7,15 @@ # ---- # Use fslmaths to generate a percentile image across a given dimension. 
# -# Examples -# -------- -# >>> from nipype.interfaces.fsl.maths import MaxImage -# >>> percer = PercentileImage() -# >>> percer.inputs.in_file = "functional.nii" # doctest: +SKIP -# >>> percer.dimension = "T" -# >>> percer.perc = 90 -# >>> percer.cmdline # doctest: +SKIP -# 'fslmaths functional.nii -Tperc 90 functional_perc.nii' +# Examples +# -------- +# >>> from nipype.interfaces.fsl.maths import MaxImage +# >>> percer = PercentileImage() +# >>> percer.inputs.in_file = "functional.nii" # doctest: +SKIP +# >>> percer.dimension = "T" +# >>> percer.perc = 90 +# >>> percer.cmdline # doctest: +SKIP +# 'fslmaths functional.nii -Tperc 90 functional_perc.nii' # # task_name: PercentileImage @@ -34,9 +34,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: medimage/nifti1 # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -60,7 +57,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -86,13 +83,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # 
type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -112,7 +109,7 @@ tests: in_file: # type=file|default=: image to operate on imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -127,7 +124,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: fslmaths functional.nii -Tperc 90 functional_perc.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -136,7 +133,7 @@ doctests: in_file: '"functional.nii" # doctest: +SKIP' # type=file|default=: image to operate on imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/percentile_image_callables.py b/nipype-auto-conv/specs/interfaces/percentile_image_callables.py deleted file mode 100644 index a86a925..0000000 --- a/nipype-auto-conv/specs/interfaces/percentile_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of PercentileImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/plot_motion_params.yaml b/nipype-auto-conv/specs/interfaces/plot_motion_params.yaml index c375af6..6c4b897 100644 --- a/nipype-auto-conv/specs/interfaces/plot_motion_params.yaml +++ b/nipype-auto-conv/specs/interfaces/plot_motion_params.yaml @@ -6,29 +6,29 @@ # Docs # ---- # Use fsl_tsplot to plot the estimated motion parameters from a -# realignment program. +# realignment program. 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> plotter = fsl.PlotMotionParams() -# >>> plotter.inputs.in_file = 'functional.par' -# >>> plotter.inputs.in_source = 'fsl' -# >>> plotter.inputs.plot_type = 'rotations' -# >>> res = plotter.run() #doctest: +SKIP +# >>> import nipype.interfaces.fsl as fsl +# >>> plotter = fsl.PlotMotionParams() +# >>> plotter.inputs.in_file = 'functional.par' +# >>> plotter.inputs.in_source = 'fsl' +# >>> plotter.inputs.plot_type = 'rotations' +# >>> res = plotter.run() #doctest: +SKIP # # -# Notes -# ----- +# Notes +# ----- # -# The 'in_source' attribute determines the order of columns that are expected -# in the source file. FSL prints motion parameters in the order rotations, -# translations, while SPM prints them in the opposite order. This interface -# should be able to plot timecourses of motion parameters generated from -# other sources as long as they fall under one of these two patterns. For -# more flexibility, see the :class:`fsl.PlotTimeSeries` interface. +# The 'in_source' attribute determines the order of columns that are expected +# in the source file. FSL prints motion parameters in the order rotations, +# translations, while SPM prints them in the opposite order. This interface +# should be able to plot timecourses of motion parameters generated from +# other sources as long as they fall under one of these two patterns. For +# more flexibility, see the :class:`fsl.PlotTimeSeries` interface. # # task_name: PlotMotionParams @@ -45,9 +45,6 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- out_file: Path - # type=file: image to write - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -71,7 +68,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image to write # type=file|default=: image to write @@ -93,13 +90,13 @@ tests: # type=file: image to write # type=file|default=: image to write output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/plot_time_series.yaml b/nipype-auto-conv/specs/interfaces/plot_time_series.yaml index 17abf1a..ee16162 100644 --- a/nipype-auto-conv/specs/interfaces/plot_time_series.yaml +++ b/nipype-auto-conv/specs/interfaces/plot_time_series.yaml @@ -7,15 +7,15 @@ # ---- # Use fsl_tsplot to create images of time course plots. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> plotter = fsl.PlotTimeSeries() -# >>> plotter.inputs.in_file = 'functional.par' -# >>> plotter.inputs.title = 'Functional timeseries' -# >>> plotter.inputs.labels = ['run1', 'run2'] -# >>> plotter.run() #doctest: +SKIP +# >>> import nipype.interfaces.fsl as fsl +# >>> plotter = fsl.PlotTimeSeries() +# >>> plotter.inputs.in_file = 'functional.par' +# >>> plotter.inputs.title = 'Functional timeseries' +# >>> plotter.inputs.labels = ['run1', 'run2'] +# >>> plotter.run() #doctest: +SKIP # # # @@ -35,9 +35,6 @@ inputs: # passed to the field in the automatically generated unittests. legend_file: generic/file # type=file|default=: legend file - out_file: Path - # type=file: image to write - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -61,7 +58,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image to write # type=file|default=: image to write @@ -103,13 +100,13 @@ tests: # type=file: image to write # type=file|default=: image to write output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # 
list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/plot_time_series_callables.py b/nipype-auto-conv/specs/interfaces/plot_time_series_callables.py deleted file mode 100644 index bd4ce1e..0000000 --- a/nipype-auto-conv/specs/interfaces/plot_time_series_callables.py +++ /dev/null @@ -1,333 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of PlotTimeSeries.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1367 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. 
(default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fsl_tsplot" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1355 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - if isinstance(inputs.in_file, list): - infile = inputs.in_file[0] - else: - infile = inputs.in_file - out_file = _gen_fname( - infile, - ext=".png", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/power_spectrum.yaml b/nipype-auto-conv/specs/interfaces/power_spectrum.yaml index 8375cfe..f51aeb0 100644 --- a/nipype-auto-conv/specs/interfaces/power_spectrum.yaml +++ b/nipype-auto-conv/specs/interfaces/power_spectrum.yaml @@ -7,13 +7,13 @@ # ---- # Use FSL PowerSpectrum command for power spectrum estimation. # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> pspec = fsl.PowerSpectrum() -# >>> pspec.inputs.in_file = 'functional.nii' -# >>> res = pspec.run() # doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> pspec = fsl.PowerSpectrum() +# >>> pspec.inputs.in_file = 'functional.nii' +# >>> res = pspec.run() # doctest: +SKIP # # # @@ -33,9 +33,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: input 4D file to estimate the power spectrum - out_file: Path - # type=file: path/name of the output 4D power spectrum file - # type=file|default=: name of output 4D file for power spectrum callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -59,7 +56,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: path/name of the output 4D power spectrum file # type=file|default=: name of output 4D file for power spectrum @@ -75,13 +72,13 @@ tests: # type=file: path/name of the output 4D power spectrum file # type=file|default=: name of output 4D file for power spectrum output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/power_spectrum_callables.py b/nipype-auto-conv/specs/interfaces/power_spectrum_callables.py deleted file mode 100644 index 126f6e6..0000000 --- 
a/nipype-auto-conv/specs/interfaces/power_spectrum_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of PowerSpectrum.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1700 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslpspec" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1689 of /interfaces/fsl/utils.py -def _gen_outfilename(inputs=None, stdout=None, stderr=None, output_dir=None): - out_file = inputs.out_file - if (out_file is attrs.NOTHING) and (inputs.in_file is not attrs.NOTHING): - out_file = _gen_fname( - inputs.in_file, - suffix="_ps", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return out_file - - -# Original source at L1695 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = os.path.abspath( - _gen_outfilename( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/prelude.yaml b/nipype-auto-conv/specs/interfaces/prelude.yaml index e4148fc..02bb462 100644 --- a/nipype-auto-conv/specs/interfaces/prelude.yaml +++ b/nipype-auto-conv/specs/interfaces/prelude.yaml @@ -7,10 +7,10 @@ # ---- # FSL prelude wrapper for phase unwrapping # -# Examples -# -------- +# Examples +# -------- # -# Please insert examples for use of this command +# Please insert examples for use of this command # # task_name: PRELUDE @@ -41,9 +41,6 @@ inputs: # type=file|default=: saving the raw phase output savemask_file: generic/file # type=file|default=: saving the mask volume - unwrapped_phase_file: Path - # type=file: unwrapped phase file - # type=file|default=: file containing unwrapepd phase callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -67,7 +64,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - 
`output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields unwrapped_phase_file: unwrapped_phase_file # type=file: unwrapped phase file # type=file|default=: file containing unwrapepd phase @@ -111,13 +108,13 @@ tests: removeramps: # type=bool|default=False: remove phase ramps during unwrapping output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/prelude_callables.py b/nipype-auto-conv/specs/interfaces/prelude_callables.py deleted file mode 100644 index 3104d4a..0000000 --- a/nipype-auto-conv/specs/interfaces/prelude_callables.py +++ /dev/null @@ -1,339 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of PRELUDE.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def unwrapped_phase_file_default(inputs): - return _gen_filename("unwrapped_phase_file", inputs=inputs) - - -def unwrapped_phase_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["unwrapped_phase_file"] - - -IFLOGGER = 
logging.getLogger("nipype.interface") - - -# Original source at L2115 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "unwrapped_phase_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["unwrapped_phase_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "prelude" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L2102 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.unwrapped_phase_file - if out_file is attrs.NOTHING: - if inputs.phase_file is not attrs.NOTHING: - out_file = _gen_fname( - inputs.phase_file, - suffix="_unwrapped", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - elif inputs.complex_phase_file is not attrs.NOTHING: - out_file = _gen_fname( - inputs.complex_phase_file, - suffix="_phase_unwrapped", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["unwrapped_phase_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/prepare_fieldmap.yaml b/nipype-auto-conv/specs/interfaces/prepare_fieldmap.yaml index bcd7324..a07b73b 100644 --- a/nipype-auto-conv/specs/interfaces/prepare_fieldmap.yaml +++ b/nipype-auto-conv/specs/interfaces/prepare_fieldmap.yaml @@ -6,24 +6,24 @@ # Docs # ---- # -# Interface for the fsl_prepare_fieldmap script (FSL 5.0) +# Interface for the fsl_prepare_fieldmap script (FSL 5.0) # -# Prepares a fieldmap suitable for FEAT from SIEMENS data - saves output in -# rad/s format (e.g. ```fsl_prepare_fieldmap SIEMENS -# images_3_gre_field_mapping images_4_gre_field_mapping fmap_rads 2.65```). +# Prepares a fieldmap suitable for FEAT from SIEMENS data - saves output in +# rad/s format (e.g. ```fsl_prepare_fieldmap SIEMENS +# images_3_gre_field_mapping images_4_gre_field_mapping fmap_rads 2.65```). 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import PrepareFieldmap -# >>> prepare = PrepareFieldmap() -# >>> prepare.inputs.in_phase = "phase.nii" -# >>> prepare.inputs.in_magnitude = "magnitude.nii" -# >>> prepare.inputs.output_type = "NIFTI_GZ" -# >>> prepare.cmdline # doctest: +ELLIPSIS -# 'fsl_prepare_fieldmap SIEMENS phase.nii magnitude.nii .../phase_fslprepared.nii.gz 2.460000' -# >>> res = prepare.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import PrepareFieldmap +# >>> prepare = PrepareFieldmap() +# >>> prepare.inputs.in_phase = "phase.nii" +# >>> prepare.inputs.in_magnitude = "magnitude.nii" +# >>> prepare.inputs.output_type = "NIFTI_GZ" +# >>> prepare.cmdline # doctest: +ELLIPSIS +# 'fsl_prepare_fieldmap SIEMENS phase.nii magnitude.nii .../phase_fslprepared.nii.gz 2.460000' +# >>> res = prepare.run() # doctest: +SKIP # # # @@ -41,13 +41,10 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- in_magnitude: medimage/nifti1 + in_magnitude: generic/file # type=file|default=: Magnitude difference map, brain extracted in_phase: medimage/nifti1 # type=file|default=: Phase difference map, in SIEMENS format range from 0-4096 or 0-8192) - out_fieldmap: Path - # type=file: output name for prepared fieldmap - # type=file|default=: output name for prepared fieldmap callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -71,7 +68,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -92,13 +89,13 @@ tests: # type=file: output name for prepared fieldmap # type=file|default=: output name for prepared fieldmap output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -117,12 +114,10 @@ tests: # (if not specified, will try to choose a sensible value) in_phase: 
# type=file|default=: Phase difference map, in SIEMENS format range from 0-4096 or 0-8192) - in_magnitude: - # type=file|default=: Magnitude difference map, brain extracted output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -145,12 +140,10 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_phase: '"phase.nii"' # type=file|default=: Phase difference map, in SIEMENS format range from 0-4096 or 0-8192) - in_magnitude: '"magnitude.nii"' - # type=file|default=: Magnitude difference map, brain extracted output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/prepare_fieldmap_callables.py b/nipype-auto-conv/specs/interfaces/prepare_fieldmap_callables.py deleted file mode 100644 index 9b4e146..0000000 --- a/nipype-auto-conv/specs/interfaces/prepare_fieldmap_callables.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of PrepareFieldmap.yaml""" - - -def out_fieldmap_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_fieldmap"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L110 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_fieldmap"] = inputs.out_fieldmap - return outputs diff --git a/nipype-auto-conv/specs/interfaces/prob_track_x.yaml b/nipype-auto-conv/specs/interfaces/prob_track_x.yaml index f03ab34..ea80aac 100644 --- a/nipype-auto-conv/specs/interfaces/prob_track_x.yaml +++ b/nipype-auto-conv/specs/interfaces/prob_track_x.yaml @@ -5,15 +5,15 @@ # # Docs # ---- -# Use FSL probtrackx for tractography on bedpostx results +# Use FSL probtrackx for tractography on bedpostx results # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> pbx = fsl.ProbTrackX(samples_base_name='merged', mask='mask.nii', seed='MASK_average_thal_right.nii', mode='seedmask', xfm='trans.mat', n_samples=3, n_steps=10, force_dir=True, opd=True, os2t=True, target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', phsamples='merged_phsamples.nii', out_dir='.') -# >>> pbx.cmdline -# 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd 
--os2t --dir=. --samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' +# >>> from nipype.interfaces import fsl +# >>> pbx = fsl.ProbTrackX(samples_base_name='merged', mask='mask.nii', seed='MASK_average_thal_right.nii', mode='seedmask', xfm='trans.mat', n_samples=3, n_steps=10, force_dir=True, opd=True, os2t=True, target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', phsamples='merged_phsamples.nii', out_dir='.') +# >>> pbx.cmdline +# 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. --samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' # # task_name: ProbTrackX @@ -42,8 +42,6 @@ inputs: # type=file|default=: second bet binary mask (in diffusion space) in twomask_symm mode mesh: generic/file # type=file|default=: Freesurfer-type surface descriptor (in ascii format) - out_dir: Path - # type=directory|default=: directory to put the final volumes in phsamples: medimage/nifti1+list-of # type=inputmultiobject|default=[]: seed_ref: generic/file @@ -92,7 +90,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -172,13 +170,13 @@ tests: verbose: # type=enum|default=0|allowed[0,1,2]: Verbose level, [0-2]. Level 2 is required to output particle files. 
output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -226,7 +224,7 @@ tests: out_dir: '"."' # type=directory|default=: directory to put the final volumes in imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -278,7 +276,7 @@ doctests: out_dir: '"."' # type=directory|default=: directory to put the final volumes in imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/prob_track_x2.yaml b/nipype-auto-conv/specs/interfaces/prob_track_x2.yaml index 736c84e..482abc2 100644 --- a/nipype-auto-conv/specs/interfaces/prob_track_x2.yaml +++ b/nipype-auto-conv/specs/interfaces/prob_track_x2.yaml @@ -7,21 +7,21 @@ # ---- # Use FSL probtrackx2 for tractography on bedpostx results # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> pbx2 = fsl.ProbTrackX2() -# >>> pbx2.inputs.seed = 'seed_source.nii.gz' -# >>> pbx2.inputs.thsamples = 'merged_th1samples.nii.gz' -# >>> pbx2.inputs.fsamples = 'merged_f1samples.nii.gz' -# >>> pbx2.inputs.phsamples = 'merged_ph1samples.nii.gz' -# >>> pbx2.inputs.mask = 'nodif_brain_mask.nii.gz' -# >>> pbx2.inputs.out_dir = '.' -# >>> pbx2.inputs.n_samples = 3 -# >>> pbx2.inputs.n_steps = 10 -# >>> pbx2.cmdline -# 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. --samples=merged --seed=seed_source.nii.gz' +# >>> from nipype.interfaces import fsl +# >>> pbx2 = fsl.ProbTrackX2() +# >>> pbx2.inputs.seed = 'seed_source.nii.gz' +# >>> pbx2.inputs.thsamples = 'merged_th1samples.nii.gz' +# >>> pbx2.inputs.fsamples = 'merged_f1samples.nii.gz' +# >>> pbx2.inputs.phsamples = 'merged_ph1samples.nii.gz' +# >>> pbx2.inputs.mask = 'nodif_brain_mask.nii.gz' +# >>> pbx2.inputs.out_dir = '.' +# >>> pbx2.inputs.n_samples = 3 +# >>> pbx2.inputs.n_steps = 10 +# >>> pbx2.cmdline +# 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. 
--samples=merged --seed=seed_source.nii.gz' # task_name: ProbTrackX2 nipype_name: ProbTrackX2 @@ -51,9 +51,7 @@ inputs: # type=file|default=: Column-space mask used for Nxn connectivity matrix mask: medimage/nifti-gz # type=file|default=: bet binary mask file in diffusion space - out_dir: Path - # type=directory|default=: directory to put the final volumes in - phsamples: medimage/nifti-gz+list-of + phsamples: generic/file+list-of # type=inputmultiobject|default=[]: seed_ref: generic/file # type=file|default=: reference vol to define seed space in simple mode - diffusion space assumed if absent @@ -67,7 +65,7 @@ inputs: # type=file|default=: Brain mask in DTI space target_masks: generic/file+list-of # type=inputmultiobject|default=[]: list of target masks - required for seeds_to_targets classification - thsamples: medimage/nifti-gz+list-of + thsamples: generic/file+list-of # type=inputmultiobject|default=[]: waypoints: generic/file # type=file|default=: waypoint mask or ascii list of waypoint masks - only keep paths going through ALL the masks @@ -115,7 +113,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -223,13 +221,13 @@ tests: verbose: # type=enum|default=0|allowed[0,1,2]: Verbose level, [0-2]. Level 2 is required to output particle files. 
output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -248,22 +246,14 @@ tests: # (if not specified, will try to choose a sensible value) seed: '"seed_source.nii.gz"' # type=traitcompound|default=None: seed volume(s), or voxel(s) or freesurfer label file - thsamples: - # type=inputmultiobject|default=[]: fsamples: # type=inputmultiobject|default=[]: - phsamples: - # type=inputmultiobject|default=[]: mask: # type=file|default=: bet binary mask file in diffusion space - out_dir: '"."' - # type=directory|default=: directory to put the final volumes in n_samples: '3' # type=int|default=5000: number of samples - default=5000 - n_steps: '10' - # type=int|default=0: number of steps per sample - default=2000 imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -286,22 +276,14 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
seed: '"seed_source.nii.gz"' # type=traitcompound|default=None: seed volume(s), or voxel(s) or freesurfer label file - thsamples: '"merged_th1samples.nii.gz"' - # type=inputmultiobject|default=[]: fsamples: '"merged_f1samples.nii.gz"' # type=inputmultiobject|default=[]: - phsamples: '"merged_ph1samples.nii.gz"' - # type=inputmultiobject|default=[]: mask: '"nodif_brain_mask.nii.gz"' # type=file|default=: bet binary mask file in diffusion space - out_dir: '"."' - # type=directory|default=: directory to put the final volumes in n_samples: '3' # type=int|default=5000: number of samples - default=5000 - n_steps: '10' - # type=int|default=0: number of steps per sample - default=2000 imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/prob_track_x2_callables.py b/nipype-auto-conv/specs/interfaces/prob_track_x2_callables.py deleted file mode 100644 index 8d008b4..0000000 --- a/nipype-auto-conv/specs/interfaces/prob_track_x2_callables.py +++ /dev/null @@ -1,485 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ProbTrackX2.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_dir_default(inputs): - return _gen_filename("out_dir", inputs=inputs) - - -def fdt_paths_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fdt_paths"] - - -def log_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["log"] - - -def lookup_tractspace_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["lookup_tractspace"] - - -def matrix1_dot_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["matrix1_dot"] - - -def matrix2_dot_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["matrix2_dot"] - - -def matrix3_dot_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["matrix3_dot"] - - -def network_matrix_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["network_matrix"] - - 
-def particle_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["particle_files"] - - -def targets_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["targets"] - - -def way_total_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["way_total"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L921 of /interfaces/fsl/dti.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_dir": - return output_dir - elif name == "mode": - if isinstance(inputs.seed, list) and isinstance(inputs.seed[0], list): - return "simple" - else: - return "seedmask" - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "probtrackx2" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1070 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = nipype_interfaces_fsl_dti__ProbTrackX___list_outputs() - - if inputs.out_dir is attrs.NOTHING: - out_dir = output_dir - else: - out_dir = inputs.out_dir - - outputs["way_total"] = os.path.abspath(os.path.join(out_dir, "waytotal")) - - if inputs.omatrix1 is not attrs.NOTHING: - outputs["network_matrix"] = os.path.abspath( - os.path.join(out_dir, "matrix_seeds_to_all_targets") - ) - outputs["matrix1_dot"] = os.path.abspath( - os.path.join(out_dir, "fdt_matrix1.dot") - ) - - if inputs.omatrix2 is not attrs.NOTHING: - outputs["lookup_tractspace"] = os.path.abspath( - os.path.join(out_dir, "lookup_tractspace_fdt_matrix2.nii.gz") - ) - outputs["matrix2_dot"] = os.path.abspath( - os.path.join(out_dir, "fdt_matrix2.dot") - ) - - if inputs.omatrix3 is not attrs.NOTHING: - outputs["matrix3_dot"] = os.path.abspath( - os.path.join(out_dir, "fdt_matrix3.dot") - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L871 of /interfaces/fsl/dti.py -def nipype_interfaces_fsl_dti__ProbTrackX___list_outputs( - inputs=None, stdout=None, stderr=None, output_dir=None -): - outputs = {} - if inputs.out_dir is attrs.NOTHING: - out_dir = _gen_filename( - "out_dir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - out_dir = inputs.out_dir - - outputs["log"] = os.path.abspath(os.path.join(out_dir, "probtrackx.log")) - # outputs['way_total'] = os.path.abspath(os.path.join(out_dir, - # 'waytotal')) - if inputs.opd is True is not attrs.NOTHING: - if isinstance(inputs.seed, list) and isinstance(inputs.seed[0], list): - outputs["fdt_paths"] = [] - for seed in inputs.seed: - outputs["fdt_paths"].append( - os.path.abspath( - _gen_fname( - ("fdt_paths_%s" % ("_".join([str(s) for s in seed]))), - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - ) - else: - outputs["fdt_paths"] = os.path.abspath( - _gen_fname( - "fdt_paths", - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - # handle seeds-to-target output files - if inputs.target_masks is not attrs.NOTHING: - outputs["targets"] = [] - for target in inputs.target_masks: - outputs["targets"].append( - os.path.abspath( - 
_gen_fname( - "seeds_to_" + os.path.split(target)[1], - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - ) - if (inputs.verbose is not attrs.NOTHING) and inputs.verbose == 2: - outputs["particle_files"] = [ - os.path.abspath(os.path.join(out_dir, "particle%d" % i)) - for i in range(inputs.n_samples) - ] - return outputs - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None 
- else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. 
- - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/prob_track_x_callables.py b/nipype-auto-conv/specs/interfaces/prob_track_x_callables.py deleted file mode 100644 index 7c8c61f..0000000 --- a/nipype-auto-conv/specs/interfaces/prob_track_x_callables.py +++ /dev/null @@ -1,418 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ProbTrackX.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def mode_default(inputs): - return _gen_filename("mode", inputs=inputs) - - -def out_dir_default(inputs): - return _gen_filename("out_dir", inputs=inputs) - - -def fdt_paths_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fdt_paths"] - - -def log_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["log"] - - -def particle_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["particle_files"] - - -def targets_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["targets"] - - -def way_total_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["way_total"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L921 of /interfaces/fsl/dti.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_dir": - return output_dir - elif name == "mode": - if isinstance(inputs.seed, list) and isinstance(inputs.seed[0], list): - return "simple" - else: - return "seedmask" - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "probtrackx" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L871 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.out_dir is attrs.NOTHING: - out_dir = _gen_filename( - "out_dir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - out_dir = inputs.out_dir - - outputs["log"] = os.path.abspath(os.path.join(out_dir, "probtrackx.log")) - # outputs['way_total'] = os.path.abspath(os.path.join(out_dir, - # 'waytotal')) - if inputs.opd is True is not attrs.NOTHING: - if isinstance(inputs.seed, list) and isinstance(inputs.seed[0], list): - outputs["fdt_paths"] = [] - for seed in inputs.seed: - outputs["fdt_paths"].append( - os.path.abspath( - _gen_fname( - ("fdt_paths_%s" % ("_".join([str(s) for s in seed]))), - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - ) - else: - outputs["fdt_paths"] = os.path.abspath( - _gen_fname( - "fdt_paths", - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - # handle seeds-to-target output files - if inputs.target_masks is not attrs.NOTHING: - outputs["targets"] = [] - for target in inputs.target_masks: - outputs["targets"].append( - os.path.abspath( - _gen_fname( - "seeds_to_" + os.path.split(target)[1], - cwd=out_dir, - suffix="", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - ) - if (inputs.verbose is not attrs.NOTHING) and inputs.verbose == 2: - outputs["particle_files"] = [ - os.path.abspath(os.path.join(out_dir, "particle%d" % i)) - for i in 
range(inputs.n_samples) - ] - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/proj_thresh.yaml b/nipype-auto-conv/specs/interfaces/proj_thresh.yaml index c111a93..f79d94e 100644 --- a/nipype-auto-conv/specs/interfaces/proj_thresh.yaml +++ b/nipype-auto-conv/specs/interfaces/proj_thresh.yaml @@ -6,17 +6,17 @@ # Docs # ---- # Use FSL proj_thresh for thresholding some outputs of probtrack -# For complete details, see the FDT Documentation -# +# For complete details, see the FDT Documentation +# # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces import fsl -# >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] -# >>> pThresh = fsl.ProjThresh(in_files=ldir, threshold=3) -# >>> pThresh.cmdline -# 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' +# >>> from nipype.interfaces import fsl +# >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] +# >>> pThresh = fsl.ProjThresh(in_files=ldir, threshold=3) +# >>> pThresh.cmdline +# 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' # # task_name: ProjThresh @@ -57,7 +57,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -69,13 +69,13 @@ tests: threshold: # type=int|default=0: threshold indicating minimum number of seed voxels entering this 
mask region output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -97,7 +97,7 @@ tests: threshold: '3' # type=int|default=0: threshold indicating minimum number of seed voxels entering this mask region imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -123,7 +123,7 @@ doctests: threshold: '3' # type=int|default=0: threshold indicating minimum number of seed voxels entering this mask region imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/proj_thresh_callables.py b/nipype-auto-conv/specs/interfaces/proj_thresh_callables.py deleted file mode 100644 index 40ea989..0000000 --- a/nipype-auto-conv/specs/interfaces/proj_thresh_callables.py +++ /dev/null @@ -1,323 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of ProjThresh.yaml""" - -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_files"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "proj_thresh" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1268 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_files"] = [] - for name in inputs.in_files: - cwd, base_name = os.path.split(name) - outputs["out_files"].append( - _gen_fname( - base_name, - cwd=cwd, - suffix="_proj_seg_thr_{}".format(inputs.threshold), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/randomise.yaml b/nipype-auto-conv/specs/interfaces/randomise.yaml index df78e2e..1c8f18d 100644 --- a/nipype-auto-conv/specs/interfaces/randomise.yaml +++ b/nipype-auto-conv/specs/interfaces/randomise.yaml @@ -6,15 +6,15 @@ # Docs # ---- # FSL Randomise: feeds the 4D projected FA data into GLM -# modelling and thresholding -# in order to find voxels which correlate with your model +# modelling and thresholding +# in order to find voxels which correlate with your model # -# Example -# ------- -# >>> import nipype.interfaces.fsl as fsl -# >>> rand = fsl.Randomise(in_file='allFA.nii', mask = 'mask.nii', tcon='design.con', design_mat='design.mat') -# >>> rand.cmdline -# 'randomise -i allFA.nii -o "randomise" -d design.mat -t design.con -m mask.nii' +# Example +# ------- +# >>> import nipype.interfaces.fsl as fsl +# >>> rand = fsl.Randomise(in_file='allFA.nii', mask = 'mask.nii', tcon='design.con', design_mat='design.mat') +# >>> rand.cmdline +# 'randomise -i allFA.nii -o "randomise" -d design.mat -t design.con -m mask.nii' # # task_name: Randomise @@ -39,7 +39,7 @@ 
inputs: # type=file|default=: 4D input file mask: medimage/nifti1 # type=file|default=: mask image - tcon: medimage-fsl/con + tcon: fileformats.medimage_fsl.Con # type=file|default=: t contrasts file x_block_labels: generic/file # type=file|default=: exchangeability block labels file @@ -75,7 +75,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -137,13 +137,13 @@ tests: tfce_C: # type=float|default=0.0: TFCE connectivity (6 or 26; default=6) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -169,7 +169,7 @@ tests: design_mat: # type=file|default=: design matrix file imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', 
and optionally 'alias' keys - module: nipype.interfaces.fsl as fsl expected_outputs: @@ -185,7 +185,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: randomise -i allFA.nii -o "randomise" -d design.mat -t design.con -m mask.nii +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -200,7 +200,7 @@ doctests: design_mat: '"design.mat"' # type=file|default=: design matrix file imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/randomise_callables.py b/nipype-auto-conv/specs/interfaces/randomise_callables.py deleted file mode 100644 index 635bd2c..0000000 --- a/nipype-auto-conv/specs/interfaces/randomise_callables.py +++ /dev/null @@ -1,409 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Randomise.yaml""" - -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def f_corrected_p_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["f_corrected_p_files"] - - -def f_p_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["f_p_files"] - - -def fstat_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, 
inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fstat_files"] - - -def t_corrected_p_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["t_corrected_p_files"] - - -def t_p_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["t_p_files"] - - -def tstat_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["tstat_files"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "randomise" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L2322 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["tstat_files"] = glob( - _gen_fname( - "%s_tstat*.nii" % inputs.base_name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["fstat_files"] = glob( - _gen_fname( - "%s_fstat*.nii" % inputs.base_name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - prefix = False - if inputs.tfce or inputs.tfce2D: - prefix = "tfce" - elif inputs.vox_p_values: - prefix = "vox" - elif inputs.c_thresh or inputs.f_c_thresh: - prefix = "clustere" - elif inputs.cm_thresh or inputs.f_cm_thresh: - prefix = "clusterm" - if prefix: - outputs["t_p_files"] = glob( - _gen_fname( - "%s_%s_p_tstat*" % (inputs.base_name, prefix), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["t_corrected_p_files"] = glob( - _gen_fname( - "%s_%s_corrp_tstat*.nii" % (inputs.base_name, prefix), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - outputs["f_p_files"] = glob( - _gen_fname( - "%s_%s_p_fstat*.nii" % (inputs.base_name, prefix), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["f_corrected_p_files"] = glob( - _gen_fname( - "%s_%s_corrp_fstat*.nii" % (inputs.base_name, prefix), - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, 
use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/reorient_2_std.yaml b/nipype-auto-conv/specs/interfaces/reorient_2_std.yaml index a0ab8b6..7a7e2a3 100644 --- a/nipype-auto-conv/specs/interfaces/reorient_2_std.yaml +++ b/nipype-auto-conv/specs/interfaces/reorient_2_std.yaml @@ -6,15 +6,15 @@ # Docs # ---- # fslreorient2std is a tool for reorienting the image to match the -# approximate orientation of the standard template images (MNI152). +# approximate orientation of the standard template images (MNI152). # # -# Examples -# -------- +# Examples +# -------- # -# >>> reorient = Reorient2Std() -# >>> reorient.inputs.in_file = "functional.nii" -# >>> res = reorient.run() # doctest: +SKIP +# >>> reorient = Reorient2Std() +# >>> reorient.inputs.in_file = "functional.nii" +# >>> res = reorient.run() # doctest: +SKIP # # # @@ -34,9 +34,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: - out_file: Path - # type=file: - # type=file|default=: callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -60,7 +57,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: # type=file|default=: @@ -76,13 +73,13 @@ tests: # type=file: # type=file|default=: output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/reorient_2_std_callables.py b/nipype-auto-conv/specs/interfaces/reorient_2_std_callables.py deleted file mode 100644 index dcfa528..0000000 --- a/nipype-auto-conv/specs/interfaces/reorient_2_std_callables.py +++ /dev/null @@ -1,333 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Reorient2Std.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - 
- -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1784 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _gen_fname( - inputs.in_file, - suffix="_reoriented", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslreorient2std" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1789 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_filename( - "out_file", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - else: - outputs["out_file"] = os.path.abspath(inputs.out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/robust_fov.yaml b/nipype-auto-conv/specs/interfaces/robust_fov.yaml index 31eed5b..3f5f03c 100644 --- a/nipype-auto-conv/specs/interfaces/robust_fov.yaml +++ b/nipype-auto-conv/specs/interfaces/robust_fov.yaml @@ -7,8 +7,8 @@ # ---- # Automatically crops an image removing lower head and neck. # -# Interface is stable 5.0.0 to 5.0.9, but default brainsize changed from -# 150mm to 170mm. +# Interface is stable 5.0.0 to 5.0.9, but default brainsize changed from +# 150mm to 170mm. # task_name: RobustFOV nipype_name: RobustFOV @@ -26,12 +26,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: input filename - out_roi: Path - # type=file: ROI volume output name - # type=file|default=: ROI volume output name - out_transform: Path - # type=file: Transformation matrix in_file to out_roi output name - # type=file|default=: Transformation matrix in_file to out_roi output name callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -58,7 +52,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -76,13 +70,13 @@ tests: # type=file: Transformation matrix in_file to out_roi output name # type=file|default=: Transformation matrix in_file to out_roi output name output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/robust_fov_callables.py 
b/nipype-auto-conv/specs/interfaces/robust_fov_callables.py deleted file mode 100644 index f9cce33..0000000 --- a/nipype-auto-conv/specs/interfaces/robust_fov_callables.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of RobustFOV.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_roi_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_roi"] - - -def out_transform_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_transform"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = 
ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - 
outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - 
- @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/sig_loss.yaml b/nipype-auto-conv/specs/interfaces/sig_loss.yaml index adf86db..0d35216 100644 --- a/nipype-auto-conv/specs/interfaces/sig_loss.yaml +++ b/nipype-auto-conv/specs/interfaces/sig_loss.yaml @@ -7,13 +7,13 @@ # ---- # Estimates signal loss from a field map (in rad/s) # -# Examples -# -------- +# Examples +# -------- # -# >>> sigloss = SigLoss() -# >>> sigloss.inputs.in_file = "phase.nii" -# >>> sigloss.inputs.echo_time = 0.03 -# >>> res = sigloss.run() # doctest: +SKIP +# >>> sigloss = SigLoss() +# >>> sigloss.inputs.in_file = "phase.nii" +# >>> sigloss.inputs.echo_time = 0.03 +# >>> res = sigloss.run() # doctest: +SKIP # # # @@ -35,9 +35,6 @@ inputs: # type=file|default=: b0 fieldmap file mask_file: generic/file # type=file|default=: brain mask file - out_file: Path - # type=file: signal loss estimate file - # type=file|default=: output signal loss estimate file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -61,7 +58,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to 
output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: signal loss estimate file # type=file|default=: output signal loss estimate file @@ -83,13 +80,13 @@ tests: slice_direction: # type=enum|default='x'|allowed['x','y','z']: slicing direction output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/sig_loss_callables.py b/nipype-auto-conv/specs/interfaces/sig_loss_callables.py deleted file mode 100644 index 18d0a40..0000000 --- a/nipype-auto-conv/specs/interfaces/sig_loss_callables.py +++ /dev/null @@ -1,328 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SigLoss.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1750 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, 
stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "sigloss" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1741 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if (outputs["out_file"] is attrs.NOTHING) and (inputs.in_file is not attrs.NOTHING): - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix="_sigloss", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/slice.yaml b/nipype-auto-conv/specs/interfaces/slice.yaml index 6ff12e7..fa64a71 100644 --- a/nipype-auto-conv/specs/interfaces/slice.yaml +++ b/nipype-auto-conv/specs/interfaces/slice.yaml @@ -8,15 +8,15 @@ # Use fslslice to split a 3D file into lots of 2D files (along z-axis). 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import Slice -# >>> slice = Slice() -# >>> slice.inputs.in_file = 'functional.nii' -# >>> slice.inputs.out_base_name = 'sl' -# >>> slice.cmdline -# 'fslslice functional.nii sl' +# >>> from nipype.interfaces.fsl import Slice +# >>> slice = Slice() +# >>> slice.inputs.in_file = 'functional.nii' +# >>> slice.inputs.out_base_name = 'sl' +# >>> slice.cmdline +# 'fslslice functional.nii sl' # # # @@ -58,7 +58,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -70,13 +70,13 @@ tests: out_base_name: # type=str|default='': outputs prefix output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -95,10 +95,8 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: input filename - out_base_name: '"sl"' - # type=str|default='': outputs prefix 
imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -121,10 +119,8 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"functional.nii"' # type=file|default=: input filename - out_base_name: '"sl"' - # type=str|default='': outputs prefix imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/slice_callables.py b/nipype-auto-conv/specs/interfaces/slice_callables.py deleted file mode 100644 index d6fc97a..0000000 --- a/nipype-auto-conv/specs/interfaces/slice_callables.py +++ /dev/null @@ -1,278 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Slice.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_files"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L305 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - """Create a Bunch which contains all possible files generated - by running the interface. Some files are always generated, others - depending on which ``inputs`` options are set. - - Returns - ------- - - outputs : Bunch object - Bunch object containing all possible files generated by - interface object. 
- - If None, file was not generated - Else, contains path, filename of generated outputfile - - """ - outputs = {} - ext = Info.output_type_to_ext(inputs.output_type) - suffix = "_slice_*" + ext - if inputs.out_base_name is not attrs.NOTHING: - fname_template = os.path.abspath(inputs.out_base_name + suffix) - else: - fname_template = fname_presuffix(inputs.in_file, suffix=suffix, use_ext=False) - - outputs["out_files"] = sorted(glob(fname_template)) - - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/slice_timer.yaml b/nipype-auto-conv/specs/interfaces/slice_timer.yaml index 5bef559..0c28440 100644 --- a/nipype-auto-conv/specs/interfaces/slice_timer.yaml +++ b/nipype-auto-conv/specs/interfaces/slice_timer.yaml @@ -7,14 +7,14 @@ # ---- # FSL slicetimer wrapper to perform slice timing correction # -# Examples -# -------- -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> st = fsl.SliceTimer() -# >>> st.inputs.in_file = example_data('functional.nii') -# >>> st.inputs.interleaved = True -# >>> result = st.run() #doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> st = fsl.SliceTimer() +# >>> st.inputs.in_file = example_data('functional.nii') +# >>> st.inputs.interleaved = True +# >>> result = st.run() #doctest: +SKIP # # task_name: SliceTimer @@ -37,8 +37,6 @@ inputs: # type=file|default=: slice timings, in fractions of TR, range 0:1 (default is 0.5 = no shift) in_file: generic/file # type=file|default=: filename of input timeseries - out_file: Path - # type=file|default=: filename of output timeseries callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -63,7 +61,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be 
provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -89,13 +87,13 @@ tests: custom_order: # type=file|default=: filename of single-column custom interleave order file (first slice is referred to as 1 not 0) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/slice_timer_callables.py b/nipype-auto-conv/specs/interfaces/slice_timer_callables.py deleted file mode 100644 index 4e3d44c..0000000 --- a/nipype-auto-conv/specs/interfaces/slice_timer_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SliceTimer.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def slice_time_corrected_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["slice_time_corrected_file"] - - -IFLOGGER = 
logging.getLogger("nipype.interface") - - -# Original source at L1578 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["slice_time_corrected_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "slicetimer" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1570 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - out_file = _gen_fname( - inputs.in_file, - suffix="_st", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["slice_time_corrected_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/slicer.yaml b/nipype-auto-conv/specs/interfaces/slicer.yaml index 3652534..04cad19 100644 --- a/nipype-auto-conv/specs/interfaces/slicer.yaml +++ b/nipype-auto-conv/specs/interfaces/slicer.yaml @@ -8,16 +8,16 @@ # Use FSL's slicer command to output a png image from a volume. 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> slice = fsl.Slicer() -# >>> slice.inputs.in_file = example_data('functional.nii') -# >>> slice.inputs.all_axial = True -# >>> slice.inputs.image_width = 750 -# >>> res = slice.run() #doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> slice = fsl.Slicer() +# >>> slice.inputs.in_file = example_data('functional.nii') +# >>> slice.inputs.all_axial = True +# >>> slice.inputs.image_width = 750 +# >>> res = slice.run() #doctest: +SKIP # # # @@ -41,9 +41,6 @@ inputs: # type=file|default=: volume to display edge overlay for (useful for checking registration in_file: generic/file # type=file|default=: input volume - out_file: Path - # type=file: picture to write - # type=file|default=: picture to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -67,7 +64,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: picture to write # type=file|default=: picture to write @@ -113,13 +110,13 @@ tests: scaling: # type=float|default=0.0: image scale output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list 
import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/slicer_callables.py b/nipype-auto-conv/specs/interfaces/slicer_callables.py deleted file mode 100644 index f7ee3c2..0000000 --- a/nipype-auto-conv/specs/interfaces/slicer_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Slicer.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1246 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. 
(default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "slicer" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1238 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - out_file = _gen_fname( - inputs.in_file, - ext=".png", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/smm.yaml b/nipype-auto-conv/specs/interfaces/smm.yaml index 2f1b196..e35538b 100644 --- a/nipype-auto-conv/specs/interfaces/smm.yaml +++ b/nipype-auto-conv/specs/interfaces/smm.yaml @@ -6,10 +6,10 @@ # Docs # ---- # -# Spatial Mixture Modelling. For more detail on the spatial mixture modelling -# see Mixture Models with Adaptive Spatial Regularisation for Segmentation -# with an Application to FMRI Data; Woolrich, M., Behrens, T., Beckmann, C., -# and Smith, S.; IEEE Trans. Medical Imaging, 24(1):1-11, 2005. +# Spatial Mixture Modelling. For more detail on the spatial mixture modelling +# see Mixture Models with Adaptive Spatial Regularisation for Segmentation +# with an Application to FMRI Data; Woolrich, M., Behrens, T., Beckmann, C., +# and Smith, S.; IEEE Trans. Medical Imaging, 24(1):1-11, 2005. # task_name: SMM nipype_name: SMM @@ -20,11 +20,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. mask: generic/file # type=file|default=: mask file spatial_data_file: generic/file @@ -40,11 +40,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
activation_p_map: generic/file # type=file: deactivation_p_map: generic/file @@ -55,38 +55,38 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - spatial_data_file: - # type=file|default=: statistics spatial map - mask: - # type=file|default=: mask file - no_deactivation_class: - # type=bool|default=False: enforces no deactivation class - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + spatial_data_file: + # type=file|default=: statistics spatial map + mask: + # type=file|default=: mask file + no_deactivation_class: + # type=bool|default=False: enforces no deactivation class + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/smm_callables.py b/nipype-auto-conv/specs/interfaces/smm_callables.py deleted file mode 100644 index c3df01d..0000000 --- a/nipype-auto-conv/specs/interfaces/smm_callables.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SMM.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def activation_p_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["activation_p_map"] - - -def deactivation_p_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["deactivation_p_map"] - - -def null_p_map_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["null_p_map"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. 
(default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "mm --ld=logdir" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1650 of /interfaces/fsl/model.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - # TODO get the true logdir from the stdout - outputs["null_p_map"] = _gen_fname( - basename="w1_mean", - cwd="logdir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["activation_p_map"] = _gen_fname( - basename="w2_mean", - cwd="logdir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if ( - inputs.no_deactivation_class is attrs.NOTHING - ) or not inputs.no_deactivation_class: - outputs["deactivation_p_map"] = _gen_fname( - basename="w3_mean", - cwd="logdir", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - 
use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if 
klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/smooth.yaml b/nipype-auto-conv/specs/interfaces/smooth.yaml index afc1c3e..0951bda 100644 --- a/nipype-auto-conv/specs/interfaces/smooth.yaml +++ b/nipype-auto-conv/specs/interfaces/smooth.yaml @@ -6,39 +6,39 @@ # Docs # ---- # -# Use fslmaths to smooth the image +# Use fslmaths to smooth the image # -# Examples -# -------- +# Examples +# -------- # -# Setting the kernel width using sigma: +# Setting the kernel width using sigma: # -# >>> sm = Smooth() -# >>> sm.inputs.output_type = 'NIFTI_GZ' -# >>> sm.inputs.in_file = 'functional2.nii' -# >>> sm.inputs.sigma = 8.0 -# >>> sm.cmdline # doctest: +ELLIPSIS -# 'fslmaths functional2.nii -kernel gauss 8.000 -fmean functional2_smooth.nii.gz' +# >>> sm = Smooth() +# >>> sm.inputs.output_type = 'NIFTI_GZ' +# >>> sm.inputs.in_file = 'functional2.nii' +# >>> sm.inputs.sigma = 8.0 +# >>> sm.cmdline # doctest: +ELLIPSIS +# 'fslmaths functional2.nii -kernel gauss 8.000 -fmean functional2_smooth.nii.gz' # -# Setting the kernel width using fwhm: +# Setting the kernel width using fwhm: # -# >>> sm = Smooth() -# >>> 
sm.inputs.output_type = 'NIFTI_GZ' -# >>> sm.inputs.in_file = 'functional2.nii' -# >>> sm.inputs.fwhm = 8.0 -# >>> sm.cmdline # doctest: +ELLIPSIS -# 'fslmaths functional2.nii -kernel gauss 3.397 -fmean functional2_smooth.nii.gz' +# >>> sm = Smooth() +# >>> sm.inputs.output_type = 'NIFTI_GZ' +# >>> sm.inputs.in_file = 'functional2.nii' +# >>> sm.inputs.fwhm = 8.0 +# >>> sm.cmdline # doctest: +ELLIPSIS +# 'fslmaths functional2.nii -kernel gauss 3.397 -fmean functional2_smooth.nii.gz' # -# One of sigma or fwhm must be set: +# One of sigma or fwhm must be set: # -# >>> from nipype.interfaces.fsl import Smooth -# >>> sm = Smooth() -# >>> sm.inputs.output_type = 'NIFTI_GZ' -# >>> sm.inputs.in_file = 'functional2.nii' -# >>> sm.cmdline #doctest: +ELLIPSIS -# Traceback (most recent call last): -# ... -# ValueError: Smooth requires a value for one of the inputs ... +# >>> from nipype.interfaces.fsl import Smooth +# >>> sm = Smooth() +# >>> sm.inputs.output_type = 'NIFTI_GZ' +# >>> sm.inputs.in_file = 'functional2.nii' +# >>> sm.cmdline #doctest: +ELLIPSIS +# Traceback (most recent call last): +# ... +# ValueError: Smooth requires a value for one of the inputs ... # # task_name: Smooth @@ -55,10 +55,7 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- in_file: medimage/nifti1 - # type=file|default=: - smoothed_file: Path - # type=file: + in_file: generic/file # type=file|default=: callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -83,7 +80,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -100,13 +97,13 @@ tests: # type=file: # type=file|default=: output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -124,13 +121,11 @@ tests: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type 
sigma: '8.0' # type=float|default=0.0: gaussian kernel sigma in mm (not voxels) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -148,13 +143,11 @@ tests: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type fwhm: '8.0' # type=float|default=0.0: gaussian kernel fwhm, will be converted to sigma in mm (not voxels) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -172,11 +165,9 @@ tests: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements 
required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -198,13 +189,11 @@ doctests: # If the field is of file-format type and the value is None, then the # '.mock()' method of the corresponding class is used instead. output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: '"functional2.nii"' - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type sigma: '8.0' # type=float|default=0.0: gaussian kernel sigma in mm (not voxels) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS @@ -215,13 +204,11 @@ doctests: # If the field is of file-format type and the value is None, then the # '.mock()' method of the corresponding class is used instead. 
output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: '"functional2.nii"' - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type fwhm: '8.0' # type=float|default=0.0: gaussian kernel fwhm, will be converted to sigma in mm (not voxels) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS @@ -232,11 +219,9 @@ doctests: # If the field is of file-format type and the value is None, then the # '.mock()' method of the corresponding class is used instead. output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - in_file: '"functional2.nii"' - # type=file|default=: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/smooth_callables.py b/nipype-auto-conv/specs/interfaces/smooth_callables.py deleted file mode 100644 index 6854c24..0000000 --- a/nipype-auto-conv/specs/interfaces/smooth_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Smooth.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def smoothed_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["smoothed_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input 
trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/smooth_estimate.yaml b/nipype-auto-conv/specs/interfaces/smooth_estimate.yaml index 1da7862..c7d7376 100644 --- a/nipype-auto-conv/specs/interfaces/smooth_estimate.yaml +++ b/nipype-auto-conv/specs/interfaces/smooth_estimate.yaml @@ -7,14 +7,14 @@ # ---- # Estimates the smoothness of an image # -# Examples -# -------- +# Examples +# -------- # -# >>> est = SmoothEstimate() -# >>> est.inputs.zstat_file = 'zstat1.nii.gz' -# >>> est.inputs.mask_file = 'mask.nii' -# >>> est.cmdline -# 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' +# >>> est = SmoothEstimate() +# >>> est.inputs.zstat_file = 'zstat1.nii.gz' +# >>> est.inputs.mask_file = 'mask.nii' +# >>> est.cmdline +# 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' # # task_name: SmoothEstimate @@ -31,7 +31,7 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- mask_file: medimage/nifti1 + mask_file: generic/file # type=file|default=: brain mask volume residual_fit_file: generic/file # type=file|default=: residual-fit image file @@ -63,7 +63,7 @@ outputs: volume: volume_callable # type=int: number of voxels in mask templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -79,13 +79,13 @@ tests: zstat_file: # type=file|default=: zstat image file output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -104,10 +104,8 @@ tests: # (if not specified, will try to choose a sensible value) zstat_file: # type=file|default=: zstat image file - mask_file: - # type=file|default=: brain mask volume imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that 
tests will typically @@ -130,10 +128,8 @@ doctests: # '.mock()' method of the corresponding class is used instead. zstat_file: '"zstat1.nii.gz"' # type=file|default=: zstat image file - mask_file: '"mask.nii"' - # type=file|default=: brain mask volume imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/smooth_estimate_callables.py b/nipype-auto-conv/specs/interfaces/smooth_estimate_callables.py deleted file mode 100644 index 967d160..0000000 --- a/nipype-auto-conv/specs/interfaces/smooth_estimate_callables.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SmoothEstimate.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def dlh_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dlh"] - - -def resels_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["resels"] - - -def volume_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["volume"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: 
- chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: 
- retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/spatial_filter.yaml b/nipype-auto-conv/specs/interfaces/spatial_filter.yaml index 8956e06..e402e12 100644 --- a/nipype-auto-conv/specs/interfaces/spatial_filter.yaml +++ b/nipype-auto-conv/specs/interfaces/spatial_filter.yaml @@ -24,9 +24,6 @@ inputs: # type=file|default=: image to operate on kernel_file: generic/file # type=file|default=: use external file for kernel - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -50,7 +47,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -80,13 +77,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output 
type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/spatial_filter_callables.py b/nipype-auto-conv/specs/interfaces/spatial_filter_callables.py deleted file mode 100644 index 8799501..0000000 --- a/nipype-auto-conv/specs/interfaces/spatial_filter_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SpatialFilter.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - 
"""Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : 
boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd 
is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/split.yaml b/nipype-auto-conv/specs/interfaces/split.yaml index a27bd79..8262670 100644 --- a/nipype-auto-conv/specs/interfaces/split.yaml +++ b/nipype-auto-conv/specs/interfaces/split.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Uses FSL Fslsplit command to separate a volume into images in -# time, x, y or z dimension. +# time, x, y or z dimension. 
# task_name: Split nipype_name: Split @@ -46,7 +46,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -60,13 +60,13 @@ tests: dimension: # type=enum|default='t'|allowed['t','x','y','z']: dimension along which the file will be split output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/split_callables.py b/nipype-auto-conv/specs/interfaces/split_callables.py deleted file mode 100644 index 28b9397..0000000 --- a/nipype-auto-conv/specs/interfaces/split_callables.py +++ /dev/null @@ -1,178 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Split.yaml""" - -import attrs -import logging -import os -from glob import glob - - -def out_files_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) 
- return outputs["out_files"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L549 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - """Create a Bunch which contains all possible files generated - by running the interface. Some files are always generated, others - depending on which ``inputs`` options are set. - - Returns - ------- - - outputs : Bunch object - Bunch object containing all possible files generated by - interface object. - - If None, file was not generated - Else, contains path, filename of generated outputfile - - """ - outputs = {} - ext = Info.output_type_to_ext(inputs.output_type) - outbase = "vol[0-9]*" - if inputs.out_base_name is not attrs.NOTHING: - outbase = "%s[0-9]*" % inputs.out_base_name - outputs["out_files"] = sorted(glob(os.path.join(output_dir, outbase + ext))) - return outputs - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/std_image.yaml b/nipype-auto-conv/specs/interfaces/std_image.yaml index bbdb636..406e875 100644 --- a/nipype-auto-conv/specs/interfaces/std_image.yaml +++ b/nipype-auto-conv/specs/interfaces/std_image.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Use fslmaths to generate a standard deviation in an image across a given -# dimension. +# dimension. # task_name: StdImage nipype_name: StdImage @@ -24,9 +24,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -50,7 +47,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -74,13 +71,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # 
type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/std_image_callables.py b/nipype-auto-conv/specs/interfaces/std_image_callables.py deleted file mode 100644 index d9e0072..0000000 --- a/nipype-auto-conv/specs/interfaces/std_image_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of StdImage.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based 
on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), 
appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = 
CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/susan.yaml b/nipype-auto-conv/specs/interfaces/susan.yaml index ebe726c..153df6f 100644 --- a/nipype-auto-conv/specs/interfaces/susan.yaml +++ b/nipype-auto-conv/specs/interfaces/susan.yaml @@ -7,21 +7,21 @@ # ---- # FSL SUSAN wrapper to perform smoothing # -# For complete details, see the `SUSAN Documentation. -# `_ +# For complete details, see the `SUSAN Documentation. 
+# `_ # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces import fsl -# >>> from nipype.testing import example_data -# >>> anatfile # doctest: +SKIP -# anatomical.nii # doctest: +SKIP -# >>> sus = fsl.SUSAN() -# >>> sus.inputs.in_file = example_data('structural.nii') -# >>> sus.inputs.brightness_threshold = 2000.0 -# >>> sus.inputs.fwhm = 8.0 -# >>> result = sus.run() # doctest: +SKIP +# >>> from nipype.interfaces import fsl +# >>> from nipype.testing import example_data +# >>> anatfile # doctest: +SKIP +# anatomical.nii # doctest: +SKIP +# >>> sus = fsl.SUSAN() +# >>> sus.inputs.in_file = example_data('structural.nii') +# >>> sus.inputs.brightness_threshold = 2000.0 +# >>> sus.inputs.fwhm = 8.0 +# >>> result = sus.run() # doctest: +SKIP # task_name: SUSAN nipype_name: SUSAN @@ -39,8 +39,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: filename of input timeseries - out_file: Path - # type=file|default=: output file name callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -65,7 +63,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -87,13 +85,13 @@ tests: out_file: # type=file|default=: output file name output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # 
type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/susan_callables.py b/nipype-auto-conv/specs/interfaces/susan_callables.py deleted file mode 100644 index 859ed75..0000000 --- a/nipype-auto-conv/specs/interfaces/susan_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of SUSAN.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def smoothed_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["smoothed_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1694 of /interfaces/fsl/preprocess.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["smoothed_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. 
- If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "susan" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1686 of /interfaces/fsl/preprocess.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - out_file = _gen_fname( - inputs.in_file, - suffix="_smooth", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["smoothed_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/swap_dimensions.yaml b/nipype-auto-conv/specs/interfaces/swap_dimensions.yaml index c2fb752..b5e7276 100644 --- a/nipype-auto-conv/specs/interfaces/swap_dimensions.yaml +++ b/nipype-auto-conv/specs/interfaces/swap_dimensions.yaml @@ -7,10 +7,10 @@ # ---- # Use fslswapdim to alter the orientation of an image. # -# This interface accepts a three-tuple corresponding to the new -# orientation. You may either provide dimension ids in the form of -# (-)x, (-)y, or (-z), or nifti-syle dimension codes -# (RL, LR, AP, PA, IS, SI). +# This interface accepts a three-tuple corresponding to the new +# orientation. You may either provide dimension ids in the form of +# (-)x, (-)y, or (-z), or nifti-syle dimension codes +# (RL, LR, AP, PA, IS, SI). # # task_name: SwapDimensions @@ -29,9 +29,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: input image - out_file: Path - # type=file: image with new dimensions - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -55,7 +52,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image with new dimensions # type=file|default=: image to write @@ -73,13 +70,13 @@ tests: # type=file: image with new dimensions # type=file|default=: image to write output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/swap_dimensions_callables.py b/nipype-auto-conv/specs/interfaces/swap_dimensions_callables.py deleted file mode 100644 index 055aff1..0000000 --- a/nipype-auto-conv/specs/interfaces/swap_dimensions_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of 
SwapDimensions.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1642 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslswapdim" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1632 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix="_newdims", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/temporal_filter.yaml b/nipype-auto-conv/specs/interfaces/temporal_filter.yaml index 7f7667a..b5f9c4a 100644 --- a/nipype-auto-conv/specs/interfaces/temporal_filter.yaml +++ b/nipype-auto-conv/specs/interfaces/temporal_filter.yaml @@ -6,7 +6,7 @@ # Docs # ---- # Use fslmaths to apply a low, high, or bandpass temporal filter to a -# timeseries. +# timeseries. # # task_name: TemporalFilter @@ -25,9 +25,6 @@ inputs: # passed to the field in the automatically generated unittests. 
in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -51,7 +48,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -77,13 +74,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/temporal_filter_callables.py b/nipype-auto-conv/specs/interfaces/temporal_filter_callables.py deleted file mode 100644 index 8da590f..0000000 --- a/nipype-auto-conv/specs/interfaces/temporal_filter_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in 
the "callables" section of TemporalFilter.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - 
resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/text_2_vest.yaml b/nipype-auto-conv/specs/interfaces/text_2_vest.yaml index 7c7ec5d..96873f8 100644 --- a/nipype-auto-conv/specs/interfaces/text_2_vest.yaml +++ b/nipype-auto-conv/specs/interfaces/text_2_vest.yaml @@ -6,18 +6,18 @@ # Docs # ---- # -# Use FSL Text2Vest`https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html`_ -# to convert your plain text design matrix data into the format used by the FSL tools. +# Use FSL Text2Vest`https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html`_ +# to convert your plain text design matrix data into the format used by the FSL tools. 
# -# Examples -# -------- -# >>> from nipype.interfaces.fsl import Text2Vest -# >>> t2v = Text2Vest() -# >>> t2v.inputs.in_file = "design.txt" -# >>> t2v.inputs.out_file = "design.mat" -# >>> t2v.cmdline -# 'Text2Vest design.txt design.mat' -# >>> res = t2v.run() # doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces.fsl import Text2Vest +# >>> t2v = Text2Vest() +# >>> t2v.inputs.in_file = "design.txt" +# >>> t2v.inputs.out_file = "design.mat" +# >>> t2v.cmdline +# 'Text2Vest design.txt design.mat' +# >>> res = t2v.run() # doctest: +SKIP # task_name: Text2Vest nipype_name: Text2Vest @@ -35,9 +35,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: text/text-file # type=file|default=: plain text file representing your design, contrast, or f-test matrix - out_file: Path - # type=file: matrix data in the format used by FSL tools - # type=file|default=: file name to store matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts) callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -54,14 +51,14 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- out_file: datascience/text-matrix + out_file: generic/file # type=file: matrix data in the format used by FSL tools # type=file|default=: file name to store matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts) callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -74,13 +71,13 @@ tests: # type=file: matrix data in the format used by FSL tools # type=file|default=: file name to store matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -99,11 +96,8 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: plain text file representing your design, contrast, or f-test matrix - out_file: '"design.mat"' - # type=file: matrix data in the format used by FSL tools - # type=file|default=: file name to store 
matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -118,7 +112,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: Text2Vest design.txt design.mat +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -126,11 +120,8 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"design.txt"' # type=file|default=: plain text file representing your design, contrast, or f-test matrix - out_file: '"design.mat"' - # type=file: matrix data in the format used by FSL tools - # type=file|default=: file name to store matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/text_2_vest_callables.py b/nipype-auto-conv/specs/interfaces/text_2_vest_callables.py deleted file mode 100644 index f8c73e2..0000000 --- a/nipype-auto-conv/specs/interfaces/text_2_vest_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Text2Vest.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an 
input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/threshold.yaml b/nipype-auto-conv/specs/interfaces/threshold.yaml index 3b898b7..d05e9b3 100644 --- a/nipype-auto-conv/specs/interfaces/threshold.yaml +++ b/nipype-auto-conv/specs/interfaces/threshold.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -78,13 +75,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # 
type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/threshold_callables.py b/nipype-auto-conv/specs/interfaces/threshold_callables.py deleted file mode 100644 index 5f6a848..0000000 --- a/nipype-auto-conv/specs/interfaces/threshold_callables.py +++ /dev/null @@ -1,329 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Threshold.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based 
on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L51 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), 
appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = 
CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/topup.yaml b/nipype-auto-conv/specs/interfaces/topup.yaml index cd2b5cb..425a410 100644 --- a/nipype-auto-conv/specs/interfaces/topup.yaml +++ b/nipype-auto-conv/specs/interfaces/topup.yaml @@ -6,25 +6,25 @@ # Docs # ---- # -# Interface for FSL topup, a tool for estimating and correcting -# susceptibility induced distortions. See FSL documentation for -# `reference `_, -# `usage examples -# `_, -# and `exemplary config files -# `_. +# Interface for FSL topup, a tool for estimating and correcting +# susceptibility induced distortions. See FSL documentation for +# `reference `_, +# `usage examples +# `_, +# and `exemplary config files +# `_. 
# -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import TOPUP -# >>> topup = TOPUP() -# >>> topup.inputs.in_file = "b0_b0rev.nii" -# >>> topup.inputs.encoding_file = "topup_encoding.txt" -# >>> topup.inputs.output_type = "NIFTI_GZ" -# >>> topup.cmdline # doctest: +ELLIPSIS -# 'topup --config=b02b0.cnf --datain=topup_encoding.txt --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log --rbmout=xfm --dfout=warpfield' -# >>> res = topup.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import TOPUP +# >>> topup = TOPUP() +# >>> topup.inputs.in_file = "b0_b0rev.nii" +# >>> topup.inputs.encoding_file = "topup_encoding.txt" +# >>> topup.inputs.output_type = "NIFTI_GZ" +# >>> topup.cmdline # doctest: +ELLIPSIS +# 'topup --config=b02b0.cnf --datain=topup_encoding.txt --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log --rbmout=xfm --dfout=warpfield' +# >>> res = topup.run() # doctest: +SKIP # # task_name: TOPUP @@ -36,43 +36,19 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. - encoding_file: text/text-file + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + encoding_file: generic/file # type=file|default=: name of text file with PE directions/times in_file: medimage/nifti1 # type=file|default=: name of 4D file with images - out_base: Path - # type=file|default=: base-name of output files (spline coefficients (Hz) and movement parameters) - readout_times: generic/file+list-of - # type=inputmultiobject|default=[]: readout times (dwell times by # phase-encode steps minus 1) - out_corrected: Path - # type=file: name of 4D image file with unwarped images - # type=file|default=: name of 4D image file with unwarped images - out_field: Path - # type=file: name of image file with field (Hz) - out_logfile: Path - # type=file: name of log-file - # type=file|default=: name of log-file - warp_res: typing.List[float] - subsamp: typing.List[int] - fwhm: typing.List[float] - reg_lambda: typing.List[float] - regmod: str - estmov: typing.List[int] - minmet: typing.List[int] - splineorder: int - interp: str - scale: bool callable_defaults: - # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` - # to set as the `default` method of input fields + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: - scale: - argstr: "--scale {int(scale)}" # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: omit: @@ -80,11 +56,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_corrected: generic/file # type=file: name of 4D image file with unwarped images # type=file|default=: name of 4D image file with unwarped images @@ -110,128 +86,124 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: name of 4D file with images - encoding_file: - # type=file|default=: name of text file with PE directions/times - encoding_direction: - # type=list|default=[]: encoding direction for automatic generation of encoding_file - readout_times: - # type=inputmultiobject|default=[]: readout times (dwell times by # phase-encode steps minus 1) - out_base: - # type=file|default=: base-name of output files (spline coefficients (Hz) and movement parameters) - out_field: - # type=file: name of 
image file with field (Hz) - # type=file|default=: name of image file with field (Hz) - out_warp_prefix: - # type=str|default='warpfield': prefix for the warpfield images (in mm) - out_mat_prefix: - # type=str|default='xfm': prefix for the realignment matrices - out_jac_prefix: - # type=str|default='jac': prefix for the warpfield images - out_corrected: - # type=file: name of 4D image file with unwarped images - # type=file|default=: name of 4D image file with unwarped images - out_logfile: - # type=file: name of log-file - # type=file|default=: name of log-file - warp_res: - # type=float|default=0.0: (approximate) resolution (in mm) of warp basis for the different sub-sampling levels - subsamp: - # type=int|default=0: sub-sampling scheme - fwhm: - # type=float|default=0.0: FWHM (in mm) of gaussian smoothing kernel - config: - # type=string|default='b02b0.cnf': Name of config file specifying command line arguments - max_iter: - # type=int|default=0: max # of non-linear iterations - reg_lambda: - # type=float|default=0.0: Weight of regularisation, default depending on --ssqlambda and --regmod switches. - ssqlambda: - # type=enum|default=1|allowed[0,1]: Weight lambda by the current value of the ssd. If used (=1), the effective weight of regularisation term becomes higher for the initial iterations, therefore initial steps are a little smoother than they would without weighting. This reduces the risk of finding a local minimum. - regmod: - # type=enum|default='bending_energy'|allowed['bending_energy','membrane_energy']: Regularisation term implementation. Defaults to bending_energy. Note that the two functions have vastly different scales. The membrane energy is based on the first derivatives and the bending energy on the second derivatives. The second derivatives will typically be much smaller than the first derivatives, so input lambda will have to be larger for bending_energy to yield approximately the same level of regularisation. 
- estmov: - # type=enum|default=1|allowed[0,1]: estimate movements if set - minmet: - # type=enum|default=0|allowed[0,1]: Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient - splineorder: - # type=int|default=0: order of spline, 2->Qadratic spline, 3->Cubic spline - numprec: - # type=enum|default='double'|allowed['double','float']: Precision for representing Hessian, double or float. - interp: - # type=enum|default='spline'|allowed['linear','spline']: Image interpolation model, linear or spline. - scale: - # type=enum|default=0|allowed[0,1]: If set (=1), the images are individually scaled to a common mean - regrid: - # type=enum|default=1|allowed[0,1]: If set (=1), the calculations are done in a different grid - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: name of 4D file with images - encoding_file: - # type=file|default=: name of text file with PE directions/times - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: name of 4D file with images + encoding_file: + # type=file|default=: name of text file with PE directions/times + encoding_direction: + # type=list|default=[]: encoding direction for automatic generation of encoding_file + readout_times: + # type=inputmultiobject|default=[]: readout times (dwell times by # phase-encode steps minus 1) + out_base: + # type=file|default=: base-name of output files (spline coefficients (Hz) and movement parameters) + out_field: + # type=file: name of image file with field (Hz) + # type=file|default=: name of image file with field (Hz) + out_warp_prefix: + # type=str|default='warpfield': prefix for the warpfield images (in mm) + out_mat_prefix: + # type=str|default='xfm': prefix for the realignment matrices + out_jac_prefix: + # type=str|default='jac': prefix for the warpfield images + out_corrected: + # type=file: name of 4D image file with unwarped images + # type=file|default=: name of 4D image file with unwarped images + out_logfile: + # type=file: name of log-file + # type=file|default=: name of log-file + warp_res: + # type=float|default=0.0: (approximate) resolution (in mm) of warp basis for the different sub-sampling levels + subsamp: + # type=int|default=0: sub-sampling scheme + fwhm: + # type=float|default=0.0: FWHM (in mm) of gaussian smoothing kernel + config: + # type=string|default='b02b0.cnf': Name of config file specifying command line arguments + max_iter: + # type=int|default=0: max # of non-linear iterations + reg_lambda: + # type=float|default=0.0: Weight of regularisation, default depending on --ssqlambda and --regmod switches. + ssqlambda: + # type=enum|default=1|allowed[0,1]: Weight lambda by the current value of the ssd. 
If used (=1), the effective weight of regularisation term becomes higher for the initial iterations, therefore initial steps are a little smoother than they would without weighting. This reduces the risk of finding a local minimum. + regmod: + # type=enum|default='bending_energy'|allowed['bending_energy','membrane_energy']: Regularisation term implementation. Defaults to bending_energy. Note that the two functions have vastly different scales. The membrane energy is based on the first derivatives and the bending energy on the second derivatives. The second derivatives will typically be much smaller than the first derivatives, so input lambda will have to be larger for bending_energy to yield approximately the same level of regularisation. + estmov: + # type=enum|default=1|allowed[0,1]: estimate movements if set + minmet: + # type=enum|default=0|allowed[0,1]: Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient + splineorder: + # type=int|default=0: order of spline, 2->Qadratic spline, 3->Cubic spline + numprec: + # type=enum|default='double'|allowed['double','float']: Precision for representing Hessian, double or float. + interp: + # type=enum|default='spline'|allowed['linear','spline']: Image interpolation model, linear or spline. 
+ scale: + # type=enum|default=0|allowed[0,1]: If set (=1), the images are individually scaled to a common mean + regrid: + # type=enum|default=1|allowed[0,1]: If set (=1), the calculations are done in a different grid + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: name of 4D file with images + output_type: '"NIFTI_GZ"' + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: - - cmdline: topup --config=b02b0.cnf --datain=topup_encoding.txt --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log --rbmout=xfm --dfout=warpfield - # str - the expected cmdline output - inputs: - # dict[str, str] - name-value pairs for inputs to be provided to the doctest. - # If the field is of file-format type and the value is None, then the - # '.mock()' method of the corresponding class is used instead. 
- in_file: '"b0_b0rev.nii"' - # type=file|default=: name of 4D file with images - encoding_file: '"topup_encoding.txt"' - # type=file|default=: name of text file with PE directions/times - output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - directive: - # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS +- cmdline: topup --config=b02b0.cnf --datain=topup_encoding.txt --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log --rbmout=xfm --dfout=warpfield + # str - the expected cmdline output + inputs: + # dict[str, str] - name-value pairs for inputs to be provided to the doctest. + # If the field is of file-format type and the value is None, then the + # '.mock()' method of the corresponding class is used instead. + in_file: '"b0_b0rev.nii"' + # type=file|default=: name of 4D file with images + output_type: '"NIFTI_GZ"' + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + imports: + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + directive: + # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/topup_callables.py b/nipype-auto-conv/specs/interfaces/topup_callables.py deleted file mode 100644 index 93a2073..0000000 --- a/nipype-auto-conv/specs/interfaces/topup_callables.py +++ /dev/null @@ -1,565 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of TOPUP.yaml""" - -import attrs -import logging -import nibabel as nb -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_corrected_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_corrected"] - - -def out_enc_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_enc_file"] - - -def out_field_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_field"] - - -def out_fieldcoef_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_fieldcoef"] - - -def out_jacs_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_jacs"] - - -def out_logfile_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_logfile"] - - -def out_mats_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_mats"] - - -def out_movpar_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_movpar"] - - -def out_warps_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_warps"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = 
source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = nipype_interfaces_fsl__FSLCommand___overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "topup" - msg += "basename is not set!" 
- raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L398 of /interfaces/fsl/epi.py -def _get_encfilename(inputs=None, stdout=None, stderr=None, output_dir=None): - out_file = os.path.join( - output_dir, ("%s_encfile.txt" % split_filename(inputs.in_file)[1]) - ) - return out_file - - -# Original source at L361 of /interfaces/fsl/epi.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = nipype_interfaces_fsl__FSLCommand___list_outputs() - del outputs["out_base"] - base_path = None - if inputs.out_base is not attrs.NOTHING: - base_path, base, _ = split_filename(inputs.out_base) - if base_path == "": - base_path = None - else: - base = split_filename(inputs.in_file)[1] + "_base" - outputs["out_fieldcoef"] = _gen_fname( - base, - suffix="_fieldcoef", - cwd=base_path, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_movpar"] = _gen_fname( - base, - suffix="_movpar", - ext=".txt", - cwd=base_path, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - n_vols = nb.load(inputs.in_file).shape[-1] - ext = Info.output_type_to_ext(inputs.output_type) - fmt = os.path.abspath("{prefix}_{i:02d}{ext}").format - outputs["out_warps"] = [ - fmt(prefix=inputs.out_warp_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) - ] - outputs["out_jacs"] = [ - fmt(prefix=inputs.out_jac_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) - ] - outputs["out_mats"] = [ - fmt(prefix=inputs.out_mat_prefix, i=i, ext=".mat") for i in range(1, n_vols + 1) - ] - - if inputs.encoding_direction is not attrs.NOTHING: - outputs["out_enc_file"] = _get_encfilename( - inputs=inputs, 
stdout=stdout, stderr=stderr, output_dir=output_dir - ) - return outputs - - -# Original source at L430 of /interfaces/fsl/epi.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if name == "out_base": - return value - return nipype_interfaces_fsl__FSLCommand___overload_extension(value, name) - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L891 of /interfaces/base/core.py -def nipype_interfaces_fsl__FSLCommand___list_outputs( - inputs=None, stdout=None, stderr=None, output_dir=None -): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - 
fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def nipype_interfaces_fsl__FSLCommand___overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = 
CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/tract_skeleton.yaml b/nipype-auto-conv/specs/interfaces/tract_skeleton.yaml index bf7687c..e84aca4 100644 --- a/nipype-auto-conv/specs/interfaces/tract_skeleton.yaml +++ b/nipype-auto-conv/specs/interfaces/tract_skeleton.yaml @@ -6,26 +6,26 @@ # Docs # ---- # Use FSL's tbss_skeleton to skeletonise an FA image or project arbitrary -# values onto a skeleton. +# values onto a skeleton. # -# There are two ways to use this interface. To create a skeleton from an FA -# image, just supply the ``in_file`` and set ``skeleton_file`` to True (or -# specify a skeleton filename. To project values onto a skeleton, you must -# set ``project_data`` to True, and then also supply values for -# ``threshold``, ``distance_map``, and ``data_file``. The -# ``search_mask_file`` and ``use_cingulum_mask`` inputs are also used in data -# projection, but ``use_cingulum_mask`` is set to True by default. This mask -# controls where the projection algorithm searches within a circular space -# around a tract, rather than in a single perpendicular direction. 
+# There are two ways to use this interface. To create a skeleton from an FA +# image, just supply the ``in_file`` and set ``skeleton_file`` to True (or +# specify a skeleton filename). To project values onto a skeleton, you must +# set ``project_data`` to True, and then also supply values for +# ``threshold``, ``distance_map``, and ``data_file``. The +# ``search_mask_file`` and ``use_cingulum_mask`` inputs are also used in data +# projection, but ``use_cingulum_mask`` is set to True by default. This mask +# controls where the projection algorithm searches within a circular space +# around a tract, rather than in a single perpendicular direction. # -# Example -# ------- +# Example +# ------- # -# >>> import nipype.interfaces.fsl as fsl -# >>> skeletor = fsl.TractSkeleton() -# >>> skeletor.inputs.in_file = "all_FA.nii.gz" -# >>> skeletor.inputs.skeleton_file = True -# >>> skeletor.run() # doctest: +SKIP +# >>> import nipype.interfaces.fsl as fsl +# >>> skeletor = fsl.TractSkeleton() +# >>> skeletor.inputs.in_file = "all_FA.nii.gz" +# >>> skeletor.inputs.skeleton_file = True +# >>> skeletor.run() # doctest: +SKIP # # task_name: TractSkeleton @@ -52,9 +52,6 @@ inputs: # type=file|default=: distance map image in_file: generic/file # type=file|default=: input image (typically mean FA volume) - projected_data: Path - # type=file: input data projected onto skeleton - # type=file|default=: input data projected onto skeleton search_mask_file: generic/file # type=file|default=: mask in which to use alternate search rule callable_defaults: @@ -83,7 +80,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output
field to be present tests: @@ -115,13 +112,13 @@ tests: # type=file: tract skeleton image # type=traitcompound|default=None: write out skeleton image output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/tract_skeleton_callables.py b/nipype-auto-conv/specs/interfaces/tract_skeleton_callables.py deleted file mode 100644 index 9833726..0000000 --- a/nipype-auto-conv/specs/interfaces/tract_skeleton_callables.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of TractSkeleton.yaml""" - -import attrs -import os.path as op -from pathlib import Path - - -def projected_data_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["projected_data"] - - -def skeleton_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["skeleton_file"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L1445 of 
/interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - _si = inputs - if (_si.project_data is not attrs.NOTHING) and _si.project_data: - proj_data = _si.projected_data - outputs["projected_data"] = proj_data - if proj_data is attrs.NOTHING: - stem = _si.data_file - if _si.alt_data_file is not attrs.NOTHING: - stem = _si.alt_data_file - outputs["projected_data"] = fname_presuffix( - stem, suffix="_skeletonised", newpath=output_dir, use_ext=True - ) - if (_si.skeleton_file is not attrs.NOTHING) and _si.skeleton_file: - outputs["skeleton_file"] = _si.skeleton_file - if isinstance(_si.skeleton_file, bool): - outputs["skeleton_file"] = fname_presuffix( - _si.in_file, suffix="_skeleton", newpath=output_dir, use_ext=True - ) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext diff --git a/nipype-auto-conv/specs/interfaces/training.yaml b/nipype-auto-conv/specs/interfaces/training.yaml index fde680a..2024b28 100644 --- a/nipype-auto-conv/specs/interfaces/training.yaml +++ b/nipype-auto-conv/specs/interfaces/training.yaml @@ -6,7 +6,7 @@ # Docs # ---- # -# Train the classifier based on your own FEAT/MELODIC 
output directory. +# Train the classifier based on your own FEAT/MELODIC output directory. # task_name: Training nipype_name: Training @@ -22,7 +22,7 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - mel_icas: generic/file+list-of + mel_icas: generic/directory+list-of # type=inputmultiobject|default=[]: Melodic output directories callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -46,7 +46,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -64,7 +64,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/training_callables.py b/nipype-auto-conv/specs/interfaces/training_callables.py deleted file mode 100644 index b162530..0000000 --- a/nipype-auto-conv/specs/interfaces/training_callables.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Training.yaml""" - -import attrs -import 
os - - -def trained_wts_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["trained_wts_file"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L200 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - if inputs.trained_wts_filestem is not attrs.NOTHING: - outputs["trained_wts_file"] = os.path.abspath( - inputs.trained_wts_filestem + ".RData" - ) - else: - outputs["trained_wts_file"] = os.path.abspath("trained_wts_file.RData") - return outputs diff --git a/nipype-auto-conv/specs/interfaces/training_set_creator.yaml b/nipype-auto-conv/specs/interfaces/training_set_creator.yaml index 69429c7..ac01c2e 100644 --- a/nipype-auto-conv/specs/interfaces/training_set_creator.yaml +++ b/nipype-auto-conv/specs/interfaces/training_set_creator.yaml @@ -6,11 +6,11 @@ # Docs # ---- # Goes through set of provided melodic output directories, to find all -# the ones that have a hand_labels_noise.txt file in them. +# the ones that have a hand_labels_noise.txt file in them. # -# This is outsourced as a separate class, so that the pipeline is -# rerun every time a handlabeled file has been changed, or a new one -# created. +# This is outsourced as a separate class, so that the pipeline is +# rerun every time a handlabeled file has been changed, or a new one +# created. 
# # task_name: TrainingSetCreator @@ -51,7 +51,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -61,7 +61,7 @@ tests: mel_icas_in: # type=inputmultiobject|default=[]: Melodic output directories imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/training_set_creator_callables.py b/nipype-auto-conv/specs/interfaces/training_set_creator_callables.py deleted file mode 100644 index 02bfada..0000000 --- a/nipype-auto-conv/specs/interfaces/training_set_creator_callables.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of TrainingSetCreator.yaml""" - -import os - - -def mel_icas_out_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mel_icas_out"] - - -# Original source at L122 of /interfaces/fsl/fix.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - mel_icas = [] - for item in inputs.mel_icas_in: - if os.path.exists(os.path.join(item, "hand_labels_noise.txt")): - mel_icas.append(item) - outputs = {} - outputs["mel_icas_out"] = mel_icas - return outputs 
diff --git a/nipype-auto-conv/specs/interfaces/unary_maths.yaml b/nipype-auto-conv/specs/interfaces/unary_maths.yaml index 7666385..5ead0ac 100644 --- a/nipype-auto-conv/specs/interfaces/unary_maths.yaml +++ b/nipype-auto-conv/specs/interfaces/unary_maths.yaml @@ -22,9 +22,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: image to operate on - out_file: Path - # type=file: image written after calculations - # type=file|default=: image to write callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -48,7 +45,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: out_file # type=file: image written after calculations # type=file|default=: image to write @@ -72,13 +69,13 @@ tests: nan2zeros: # type=bool|default=False: change NaNs to zeros before doing anything output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will 
typically diff --git a/nipype-auto-conv/specs/interfaces/unary_maths_callables.py b/nipype-auto-conv/specs/interfaces/unary_maths_callables.py deleted file mode 100644 index 62b82c1..0000000 --- a/nipype-auto-conv/specs/interfaces/unary_maths_callables.py +++ /dev/null @@ -1,337 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of UnaryMaths.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L61 of /interfaces/fsl/maths.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return nipype_interfaces_fsl_maths__MathsCommand___list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "fslmaths" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L502 of /interfaces/fsl/maths.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - _suffix = "_" + inputs.operation - return nipype_interfaces_fsl_maths__MathsCommand___list_outputs() - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. 
- - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L51 of /interfaces/fsl/maths.py -def nipype_interfaces_fsl_maths__MathsCommand___list_outputs( - inputs=None, stdout=None, stderr=None, output_dir=None -): - outputs = {} - outputs["out_file"] = inputs.out_file - if inputs.out_file is attrs.NOTHING: - outputs["out_file"] = _gen_fname( - inputs.in_file, - suffix=_suffix, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/vec_reg.yaml b/nipype-auto-conv/specs/interfaces/vec_reg.yaml index d8908a0..5698b2e 100644 --- a/nipype-auto-conv/specs/interfaces/vec_reg.yaml +++ b/nipype-auto-conv/specs/interfaces/vec_reg.yaml @@ -6,16 +6,16 @@ # Docs # ---- # Use FSL vecreg for registering vector data -# For complete details, see the FDT Documentation -# +# For complete details, see the FDT Documentation +# # -# Example -# ------- +# Example +# ------- # -# >>> from nipype.interfaces import fsl -# >>> vreg = fsl.VecReg(in_file='diffusion.nii', affine_mat='trans.mat', ref_vol='mni.nii', out_file='diffusion_vreg.nii') -# >>> vreg.cmdline -# 'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii' +# >>> from nipype.interfaces import fsl +# >>> vreg = fsl.VecReg(in_file='diffusion.nii', affine_mat='trans.mat', ref_vol='mni.nii', out_file='diffusion_vreg.nii') +# >>> vreg.cmdline +# 'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii' # # task_name: VecReg @@ -38,9 +38,6 @@ inputs: # type=file|default=: filename for input vector or tensor field mask: generic/file # type=file|default=: brain mask in input space - out_file: Path - # type=file: path/name of filename for the registered vector or tensor field - # type=file|default=: filename for output registered vector or tensor field ref_mask: generic/file # type=file|default=: brain mask in output space (useful for speed up of nonlinear reg) ref_vol: medimage/nifti1 @@ -74,7 +71,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent 
`*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields out_file: '"diffusion_vreg.nii"' # type=file: path/name of filename for the registered vector or tensor field # type=file|default=: filename for output registered vector or tensor field @@ -106,13 +103,13 @@ tests: ref_mask: # type=file|default=: brain mask in output space (useful for speed up of nonlinear reg) output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -139,7 +136,7 @@ tests: # type=file: path/name of filename for the registered vector or tensor field # type=file|default=: filename for output registered vector or tensor field imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -170,7 +167,7 @@ doctests: # type=file: path/name of filename for the 
registered vector or tensor field # type=file|default=: filename for output registered vector or tensor field imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/vec_reg_callables.py b/nipype-auto-conv/specs/interfaces/vec_reg_callables.py deleted file mode 100644 index 1a7b200..0000000 --- a/nipype-auto-conv/specs/interfaces/vec_reg_callables.py +++ /dev/null @@ -1,332 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of VecReg.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L1216 of /interfaces/fsl/dti.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )[name] - else: - return None - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. 
- If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. - - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "vecreg" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L1205 of /interfaces/fsl/dti.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = inputs.out_file - if (outputs["out_file"] is attrs.NOTHING) and (inputs.in_file is not attrs.NOTHING): - pth, base_name = os.path.split(inputs.in_file) - outputs["out_file"] = _gen_fname( - base_name, - cwd=os.path.abspath(pth), - suffix="_vreg", - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - outputs["out_file"] = os.path.abspath(outputs["out_file"]) - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - 
use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if 
klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. 
- - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. - - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/interfaces/vest_2_text.yaml b/nipype-auto-conv/specs/interfaces/vest_2_text.yaml index b4dbeb3..d2a098c 100644 --- a/nipype-auto-conv/specs/interfaces/vest_2_text.yaml +++ b/nipype-auto-conv/specs/interfaces/vest_2_text.yaml @@ -6,17 +6,17 @@ # Docs # ---- # -# Use FSL Vest2Text`https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html`_ -# to convert your design.mat design.con and design.fts files into plain text. +# Use FSL Vest2Text`https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html`_ +# to convert your design.mat design.con and design.fts files into plain text. 
# -# Examples -# -------- -# >>> from nipype.interfaces.fsl import Vest2Text -# >>> v2t = Vest2Text() -# >>> v2t.inputs.in_file = "design.mat" -# >>> v2t.cmdline -# 'Vest2Text design.mat design.txt' -# >>> res = v2t.run() # doctest: +SKIP +# Examples +# -------- +# >>> from nipype.interfaces.fsl import Vest2Text +# >>> v2t = Vest2Text() +# >>> v2t.inputs.in_file = "design.mat" +# >>> v2t.cmdline +# 'Vest2Text design.mat design.txt' +# >>> res = v2t.run() # doctest: +SKIP # task_name: Vest2Text nipype_name: Vest2Text @@ -34,9 +34,6 @@ inputs: # passed to the field in the automatically generated unittests. in_file: datascience/text-matrix # type=file|default=: matrix data stored in the format used by FSL tools - out_file: Path - # type=file: plain text representation of FSL matrix - # type=file|default='design.txt': file name to store text output from matrix callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set as the `default` method of input fields @@ -60,7 +57,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -73,13 +70,13 @@ tests: # type=file: plain text representation of FSL matrix # type=file|default='design.txt': file name to store text output from matrix output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment 
variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -99,7 +96,7 @@ tests: in_file: # type=file|default=: matrix data stored in the format used by FSL tools imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -114,7 +111,7 @@ tests: # bool - whether the unittest is expected to fail or not. Set to false # when you are satisfied with the edits you have made to this file doctests: -- cmdline: Vest2Text design.mat design.txt +- cmdline: # str - the expected cmdline output inputs: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. @@ -123,7 +120,7 @@ doctests: in_file: '"design.mat"' # type=file|default=: matrix data stored in the format used by FSL tools imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/vest_2_text_callables.py b/nipype-auto-conv/specs/interfaces/vest_2_text_callables.py deleted file mode 100644 index 2b60243..0000000 --- a/nipype-auto-conv/specs/interfaces/vest_2_text_callables.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of Vest2Text.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an 
input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def 
_overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of 
/interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. - - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/warp_points.yaml b/nipype-auto-conv/specs/interfaces/warp_points.yaml index 4acaa41..dd45860 100644 --- a/nipype-auto-conv/specs/interfaces/warp_points.yaml +++ b/nipype-auto-conv/specs/interfaces/warp_points.yaml @@ -6,24 +6,24 @@ # Docs # ---- # Use FSL `img2imgcoord `_ -# to transform point sets. Accepts plain text files and vtk files. +# to transform point sets. Accepts plain text files and vtk files. # -# .. Note:: transformation of TrackVis trk files is not yet implemented +# .. 
Note:: transformation of TrackVis trk files is not yet implemented # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import WarpPoints -# >>> warppoints = WarpPoints() -# >>> warppoints.inputs.in_coords = 'surf.txt' -# >>> warppoints.inputs.src_file = 'epi.nii' -# >>> warppoints.inputs.dest_file = 'T1.nii' -# >>> warppoints.inputs.warp_file = 'warpfield.nii' -# >>> warppoints.inputs.coord_mm = True -# >>> warppoints.cmdline # doctest: +ELLIPSIS -# 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' -# >>> res = warppoints.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import WarpPoints +# >>> warppoints = WarpPoints() +# >>> warppoints.inputs.in_coords = 'surf.txt' +# >>> warppoints.inputs.src_file = 'epi.nii' +# >>> warppoints.inputs.dest_file = 'T1.nii' +# >>> warppoints.inputs.warp_file = 'warpfield.nii' +# >>> warppoints.inputs.coord_mm = True +# >>> warppoints.cmdline # doctest: +ELLIPSIS +# 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' +# >>> res = warppoints.run() # doctest: +SKIP # # # @@ -45,12 +45,9 @@ inputs: # type=file|default=: filename of destination image in_coords: text/text-file # type=file|default=: filename of file containing coordinates - out_file: Path - # type=file: Name of output file, containing the warp as field or coefficients. - # type=file|default=: output file name - src_file: medimage/nifti1 + src_file: generic/file # type=file|default=: filename of source image - warp_file: medimage/nifti1 + warp_file: generic/file # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) xfm_file: generic/file # type=file|default=: filename of affine transform (e.g. 
source2dest.mat) @@ -77,7 +74,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -106,7 +103,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -125,16 +122,12 @@ tests: # (if not specified, will try to choose a sensible value) in_coords: # type=file|default=: filename of file containing coordinates - src_file: - # type=file|default=: filename of source image dest_file: # type=file|default=: filename of destination image - warp_file: - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -157,16 +150,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_coords: '"surf.txt"' # type=file|default=: filename of file containing coordinates - src_file: '"epi.nii"' - # type=file|default=: filename of source image dest_file: '"T1.nii"' # type=file|default=: filename of destination image - warp_file: '"warpfield.nii"' - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/warp_points_callables.py b/nipype-auto-conv/specs/interfaces/warp_points_callables.py deleted file mode 100644 index 1962865..0000000 --- a/nipype-auto-conv/specs/interfaces/warp_points_callables.py +++ /dev/null @@ -1,204 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of WarpPoints.yaml""" - -import attrs -import logging -import os -import os.path as op - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is 
not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, 
stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L2585 of /interfaces/fsl/utils.py -def _overload_extension( - value, name, inputs=None, stdout=None, stderr=None, output_dir=None -): - if name == "out_file": - return "%s.%s" % (value, getattr(self, "_outformat")) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/warp_points_from_std.yaml b/nipype-auto-conv/specs/interfaces/warp_points_from_std.yaml index 7320302..c88febf 100644 --- a/nipype-auto-conv/specs/interfaces/warp_points_from_std.yaml +++ b/nipype-auto-conv/specs/interfaces/warp_points_from_std.yaml @@ -6,24 +6,24 @@ # Docs # ---- # -# Use FSL `std2imgcoord `_ -# to transform point sets to standard space coordinates. Accepts plain text coordinates -# files. +# Use FSL `std2imgcoord `_ +# to transform point sets to standard space coordinates. Accepts plain text coordinates +# files. 
# # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import WarpPointsFromStd -# >>> warppoints = WarpPointsFromStd() -# >>> warppoints.inputs.in_coords = 'surf.txt' -# >>> warppoints.inputs.img_file = 'T1.nii' -# >>> warppoints.inputs.std_file = 'mni.nii' -# >>> warppoints.inputs.warp_file = 'warpfield.nii' -# >>> warppoints.inputs.coord_mm = True -# >>> warppoints.cmdline # doctest: +ELLIPSIS -# 'std2imgcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' -# >>> res = warppoints.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import WarpPointsFromStd +# >>> warppoints = WarpPointsFromStd() +# >>> warppoints.inputs.in_coords = 'surf.txt' +# >>> warppoints.inputs.img_file = 'T1.nii' +# >>> warppoints.inputs.std_file = 'mni.nii' +# >>> warppoints.inputs.warp_file = 'warpfield.nii' +# >>> warppoints.inputs.coord_mm = True +# >>> warppoints.cmdline # doctest: +ELLIPSIS +# 'std2imgcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' +# >>> res = warppoints.run() # doctest: +SKIP # # # @@ -41,13 +41,13 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - img_file: medimage/nifti1 + img_file: generic/file # type=file|default=: filename of a destination image in_coords: text/text-file # type=file|default=: filename of file containing coordinates std_file: medimage/nifti1 # type=file|default=: filename of the image in standard space - warp_file: medimage/nifti1 + warp_file: generic/file # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) xfm_file: generic/file # type=file|default=: filename of affine transform (e.g. 
source2dest.mat) @@ -73,7 +73,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -99,7 +99,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -118,16 +118,12 @@ tests: # (if not specified, will try to choose a sensible value) in_coords: # type=file|default=: filename of file containing coordinates - img_file: - # type=file|default=: filename of a destination image std_file: # type=file|default=: filename of the image in standard space - warp_file: - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -150,16 +146,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. 
in_coords: '"surf.txt"' # type=file|default=: filename of file containing coordinates - img_file: '"T1.nii"' - # type=file|default=: filename of a destination image std_file: '"mni.nii"' # type=file|default=: filename of the image in standard space - warp_file: '"warpfield.nii"' - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/warp_points_from_std_callables.py b/nipype-auto-conv/specs/interfaces/warp_points_from_std_callables.py deleted file mode 100644 index 0bd158b..0000000 --- a/nipype-auto-conv/specs/interfaces/warp_points_from_std_callables.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of WarpPointsFromStd.yaml""" - -import os.path as op - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L2744 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - outputs["out_file"] = op.abspath("stdout.nipype") - return outputs diff --git a/nipype-auto-conv/specs/interfaces/warp_points_to_std.yaml b/nipype-auto-conv/specs/interfaces/warp_points_to_std.yaml index 
0cfbe88..1bb65b5 100644 --- a/nipype-auto-conv/specs/interfaces/warp_points_to_std.yaml +++ b/nipype-auto-conv/specs/interfaces/warp_points_to_std.yaml @@ -6,26 +6,26 @@ # Docs # ---- # -# Use FSL `img2stdcoord `_ -# to transform point sets to standard space coordinates. Accepts plain text -# files and vtk files. +# Use FSL `img2stdcoord `_ +# to transform point sets to standard space coordinates. Accepts plain text +# files and vtk files. # -# .. Note:: transformation of TrackVis trk files is not yet implemented +# .. Note:: transformation of TrackVis trk files is not yet implemented # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl import WarpPointsToStd -# >>> warppoints = WarpPointsToStd() -# >>> warppoints.inputs.in_coords = 'surf.txt' -# >>> warppoints.inputs.img_file = 'T1.nii' -# >>> warppoints.inputs.std_file = 'mni.nii' -# >>> warppoints.inputs.warp_file = 'warpfield.nii' -# >>> warppoints.inputs.coord_mm = True -# >>> warppoints.cmdline # doctest: +ELLIPSIS -# 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' -# >>> res = warppoints.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import WarpPointsToStd +# >>> warppoints = WarpPointsToStd() +# >>> warppoints.inputs.in_coords = 'surf.txt' +# >>> warppoints.inputs.img_file = 'T1.nii' +# >>> warppoints.inputs.std_file = 'mni.nii' +# >>> warppoints.inputs.warp_file = 'warpfield.nii' +# >>> warppoints.inputs.coord_mm = True +# >>> warppoints.cmdline # doctest: +ELLIPSIS +# 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' +# >>> res = warppoints.run() # doctest: +SKIP # # # @@ -43,18 +43,15 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- img_file: medimage/nifti1 + img_file: generic/file # type=file|default=: filename of input image in_coords: text/text-file # type=file|default=: filename of file containing coordinates - out_file: Path - # type=file: Name of output file, containing the warp as field or coefficients. - # type=file|default=: output file name premat_file: generic/file # type=file|default=: filename of pre-warp affine transform (e.g. example_func2highres.mat) std_file: medimage/nifti1 # type=file|default=: filename of destination image - warp_file: medimage/nifti1 + warp_file: generic/file # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) xfm_file: generic/file # type=file|default=: filename of affine transform (e.g. source2dest.mat) @@ -81,7 +78,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -112,7 +109,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -131,16 +128,12 @@ tests: # (if not specified, will try to choose a sensible value) in_coords: # type=file|default=: filename of file containing coordinates - img_file: - # type=file|default=: filename of input image std_file: # type=file|default=: filename of 
destination image - warp_file: - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -163,16 +156,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_coords: '"surf.txt"' # type=file|default=: filename of file containing coordinates - img_file: '"T1.nii"' - # type=file|default=: filename of input image std_file: '"mni.nii"' # type=file|default=: filename of destination image - warp_file: '"warpfield.nii"' - # type=file|default=: filename of warpfield (e.g. intermediate2dest_warp.nii.gz) coord_mm: 'True' # type=bool|default=False: all coordinates in mm imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/warp_points_to_std_callables.py b/nipype-auto-conv/specs/interfaces/warp_points_to_std_callables.py deleted file mode 100644 index 078867b..0000000 --- a/nipype-auto-conv/specs/interfaces/warp_points_to_std_callables.py +++ /dev/null @@ -1,204 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of WarpPointsToStd.yaml""" - -import attrs -import logging -import os -import os.path as op - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -iflogger = logging.getLogger("nipype.interface") - - -# Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was 
found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L2585 of /interfaces/fsl/utils.py -def _overload_extension( - value, name, 
inputs=None, stdout=None, stderr=None, output_dir=None -): - if name == "out_file": - return "%s.%s" % (value, getattr(self, "_outformat")) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. - - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/warp_utils.yaml b/nipype-auto-conv/specs/interfaces/warp_utils.yaml index 0210a25..7d22999 100644 --- a/nipype-auto-conv/specs/interfaces/warp_utils.yaml +++ b/nipype-auto-conv/specs/interfaces/warp_utils.yaml @@ -6,22 +6,22 @@ # Docs # ---- # Use FSL `fnirtfileutils `_ -# to convert field->coefficients, coefficients->field, coefficients->other_coefficients etc +# to convert field->coefficients, coefficients->field, coefficients->other_coefficients etc # # -# Examples -# -------- +# Examples +# -------- # -# >>> from nipype.interfaces.fsl 
import WarpUtils -# >>> warputils = WarpUtils() -# >>> warputils.inputs.in_file = "warpfield.nii" -# >>> warputils.inputs.reference = "T1.nii" -# >>> warputils.inputs.out_format = 'spline' -# >>> warputils.inputs.warp_resolution = (10,10,10) -# >>> warputils.inputs.output_type = "NIFTI_GZ" -# >>> warputils.cmdline # doctest: +ELLIPSIS -# 'fnirtfileutils --in=warpfield.nii --outformat=spline --ref=T1.nii --warpres=10.0000,10.0000,10.0000 --out=warpfield_coeffs.nii.gz' -# >>> res = invwarp.run() # doctest: +SKIP +# >>> from nipype.interfaces.fsl import WarpUtils +# >>> warputils = WarpUtils() +# >>> warputils.inputs.in_file = "warpfield.nii" +# >>> warputils.inputs.reference = "T1.nii" +# >>> warputils.inputs.out_format = 'spline' +# >>> warputils.inputs.warp_resolution = (10,10,10) +# >>> warputils.inputs.output_type = "NIFTI_GZ" +# >>> warputils.cmdline # doctest: +ELLIPSIS +# 'fnirtfileutils --in=warpfield.nii --outformat=spline --ref=T1.nii --warpres=10.0000,10.0000,10.0000 --out=warpfield_coeffs.nii.gz' +# >>> res = invwarp.run() # doctest: +SKIP # # # @@ -41,13 +41,7 @@ inputs: # passed to the field in the automatically generated unittests. in_file: medimage/nifti1 # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). - out_file: Path - # type=file: Name of output file, containing the warp as field or coefficients. - # type=file|default=: Name of output file. The format of the output depends on what other parameters are set. The default format is a (4D) field-file. If the --outformat is set to spline the format will be a (4D) file of spline coefficients. - out_jacobian: Path - # type=file: Name of output file, containing the map of the determinant of the Jacobian - # type=file|default=: Specifies that a (3D) file of Jacobian determinants corresponding to --in should be produced and written to filename. 
- reference: medimage/nifti1 + reference: generic/file # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` @@ -75,7 +69,7 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: @@ -103,13 +97,13 @@ tests: with_affine: # type=bool|default=False: Specifies that the affine transform (i.e. that which was specified for the --aff parameter in fnirt) should be included as displacements in the --out file. That can be useful for interfacing with software that cannot decode FSL/fnirt coefficient-files (where the affine transform is stored separately from the displacements). 
output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type args: # type=str|default='': Additional parameters to the command environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -128,16 +122,12 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). - reference: - # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. out_format: '"spline"' # type=enum|default='spline'|allowed['field','spline']: Specifies the output format. If set to field (default) the output will be a (4D) field-file. If set to spline the format will be a (4D) file of spline coefficients. - warp_resolution: (10,10,10) - # type=tuple|default=(0.0, 0.0, 0.0): Specifies the resolution/knot-spacing of the splines pertaining to the coefficients in the --out file. This parameter is only relevant if --outformat is set to spline. 
It should be noted that if the --in file has a higher resolution, the resulting coefficients will pertain to the closest (in a least-squares sense) file in the space of fields with the --warpres resolution. It should also be noted that the resolution will always be an integer multiple of the voxel size. output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -160,16 +150,12 @@ doctests: # '.mock()' method of the corresponding class is used instead. in_file: '"warpfield.nii"' # type=file|default=: Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout). - reference: '"T1.nii"' - # type=file|default=: Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt. out_format: '"spline"' # type=enum|default='spline'|allowed['field','spline']: Specifies the output format. If set to field (default) the output will be a (4D) field-file. If set to spline the format will be a (4D) file of spline coefficients. - warp_resolution: (10,10,10) - # type=tuple|default=(0.0, 0.0, 0.0): Specifies the resolution/knot-spacing of the splines pertaining to the coefficients in the --out file. 
This parameter is only relevant if --outformat is set to spline. It should be noted that if the --in file has a higher resolution, the resulting coefficients will pertain to the closest (in a least-squares sense) file in the space of fields with the --warpres resolution. It should also be noted that the resolution will always be an integer multiple of the voxel size. output_type: '"NIFTI_GZ"' - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. # doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/warp_utils_callables.py b/nipype-auto-conv/specs/interfaces/warp_utils_callables.py deleted file mode 100644 index 25df087..0000000 --- a/nipype-auto-conv/specs/interfaces/warp_utils_callables.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of WarpUtils.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob - - -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] - - -def out_jacobian_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_jacobian"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -iflogger = logging.getLogger("nipype.interface") - - -# 
Original source at L809 of /interfaces/base/core.py -def _filename_from_source( - name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - if chain is None: - chain = [] - - trait_spec = inputs.trait(name) - retval = getattr(inputs, name) - source_ext = None - if (retval is attrs.NOTHING) or "%s" in retval: - if not trait_spec.name_source: - return retval - - # Do not generate filename when excluded by other inputs - if any( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.xor or () - ): - return retval - - # Do not generate filename when required fields are missing - if not all( - (getattr(inputs, field) is not attrs.NOTHING) - for field in trait_spec.requires or () - ): - return retval - - if (retval is not attrs.NOTHING) and "%s" in retval: - name_template = retval - else: - name_template = trait_spec.name_template - if not name_template: - name_template = "%s_generated" - - ns = trait_spec.name_source - while isinstance(ns, (list, tuple)): - if len(ns) > 1: - iflogger.warning("Only one name_source per trait is allowed") - ns = ns[0] - - if not isinstance(ns, (str, bytes)): - raise ValueError( - "name_source of '{}' trait should be an input trait " - "name, but a type {} object was found".format(name, type(ns)) - ) - - if getattr(inputs, ns) is not attrs.NOTHING: - name_source = ns - source = getattr(inputs, name_source) - while isinstance(source, list): - source = source[0] - - # special treatment for files - try: - _, base, source_ext = split_filename(source) - except (AttributeError, TypeError): - base = source - else: - if name in chain: - raise NipypeInterfaceError("Mutually pointing name_sources") - - chain.append(name) - base = _filename_from_source( - ns, - chain, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - if base is not attrs.NOTHING: - _, _, source_ext = split_filename(base) - else: - # Do not generate filename when required fields are missing - return retval - - 
chain = None - retval = name_template % base - _, _, ext = split_filename(retval) - if trait_spec.keep_extension and (ext or source_ext): - if (ext is None or not ext) and source_ext: - retval = retval + source_ext - else: - retval = _overload_extension( - retval, - name, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - return retval - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L891 of /interfaces/base/core.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - metadata = dict(name_source=lambda t: t is not None) - traits = inputs.traits(**metadata) - if traits: - outputs = {} - for name, trait_spec in list(traits.items()): - out_name = name - if trait_spec.output_name is not None: - out_name = trait_spec.output_name - fname = _filename_from_source( - name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - ) - if fname is not attrs.NOTHING: - outputs[out_name] = os.path.abspath(fname) - return outputs - - -# Original source at L249 of /interfaces/fsl/base.py -def _overload_extension( - value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None -): - return value + Info.output_type_to_ext(inputs.output_type) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) - - -# Original source at L125 of /interfaces/base/support.py -class NipypeInterfaceError(Exception): - """Custom error for interfaces""" - - def __init__(self, value): - self.value = value - - def __str__(self): - return "{}".format(self.value) diff --git a/nipype-auto-conv/specs/interfaces/x_fibres_5.yaml b/nipype-auto-conv/specs/interfaces/x_fibres_5.yaml index 95887a8..0ac0434 100644 --- a/nipype-auto-conv/specs/interfaces/x_fibres_5.yaml +++ b/nipype-auto-conv/specs/interfaces/x_fibres_5.yaml @@ -6,8 +6,8 @@ # Docs # ---- # -# Perform model parameters estimation for local (voxelwise) diffusion -# parameters +# Perform model parameters estimation for local (voxelwise) diffusion +# parameters # task_name: XFibres5 nipype_name: XFibres5 @@ -18,11 +18,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. bvals: generic/file # type=file|default=: b values file bvecs: generic/file @@ -46,17 +46,17 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. dyads: generic/file+list-of # type=outputmultiobject: Mean of PDD distribution in vector form. 
fsamples: generic/file+list-of # type=outputmultiobject: Samples from the distribution on f anisotropy mean_S0samples: generic/file - # type=file: Mean of distribution on T2wbaseline signal intensity S0 + # type=file: Mean of distribution on T2w baseline signal intensity S0 mean_dsamples: generic/file # type=file: Mean of distribution on diffusivity d mean_fsamples: generic/file+list-of @@ -71,80 +71,80 @@ outputs: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields templates: - # dict[str, str] - `output_file_template` values to be provided to output fields + # dict[str, str] - `path_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: - - inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - gradnonlin: - # type=file|default=: gradient file corresponding to slice - dwi: - # type=file|default=: diffusion weighted image data file - mask: - # type=file|default=: brain binary mask file (i.e. 
from BET) - bvecs: - # type=file|default=: b vectors file - bvals: - # type=file|default=: b values file - logdir: - # type=directory|default='.': - n_fibres: - # type=range|default=2: Maximum number of fibres to fit in each voxel - model: - # type=enum|default=1|allowed[1,2,3]: use monoexponential (1, default, required for single-shell) or multiexponential (2, multi-shell) model - fudge: - # type=int|default=0: ARD fudge factor - n_jumps: - # type=int|default=5000: Num of jumps to be made by MCMC - burn_in: - # type=range|default=0: Total num of jumps at start of MCMC to be discarded - burn_in_no_ard: - # type=range|default=0: num of burnin jumps before the ard is imposed - sample_every: - # type=range|default=1: Num of jumps for each sample (MCMC) - update_proposal_every: - # type=range|default=40: Num of jumps for each update to the proposal density std (MCMC) - seed: - # type=int|default=0: seed for pseudo random number generator - no_ard: - # type=bool|default=False: Turn ARD off on all fibres - all_ard: - # type=bool|default=False: Turn ARD on on all fibres - no_spat: - # type=bool|default=False: Initialise with tensor, not spatially - non_linear: - # type=bool|default=False: Initialise with nonlinear fitting - cnlinear: - # type=bool|default=False: Initialise with constrained nonlinear fitting - rician: - # type=bool|default=False: use Rician noise modeling - f0_noard: - # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 - f0_ard: - # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 - force_dir: - # type=bool|default=True: use the actual directory name given (do not add + to make a new directory) - output_type: - # type=enum|default='NIFTI'|allowed['NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type - args: - # type=str|default='': Additional parameters to the command - environ: - # type=dict|default={}: Environment variables - imports: - # 
list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + gradnonlin: + # type=file|default=: gradient file corresponding to slice + dwi: + # type=file|default=: diffusion weighted image data file + mask: + # type=file|default=: brain binary mask file (i.e. 
from BET) + bvecs: + # type=file|default=: b vectors file + bvals: + # type=file|default=: b values file + logdir: + # type=directory|default='.': + n_fibres: + # type=range|default=2: Maximum number of fibres to fit in each voxel + model: + # type=enum|default=1|allowed[1,2,3]: use monoexponential (1, default, required for single-shell) or multiexponential (2, multi-shell) model + fudge: + # type=int|default=0: ARD fudge factor + n_jumps: + # type=int|default=5000: Num of jumps to be made by MCMC + burn_in: + # type=range|default=0: Total num of jumps at start of MCMC to be discarded + burn_in_no_ard: + # type=range|default=0: num of burnin jumps before the ard is imposed + sample_every: + # type=range|default=1: Num of jumps for each sample (MCMC) + update_proposal_every: + # type=range|default=40: Num of jumps for each update to the proposal density std (MCMC) + seed: + # type=int|default=0: seed for pseudo random number generator + no_ard: + # type=bool|default=False: Turn ARD off on all fibres + all_ard: + # type=bool|default=False: Turn ARD on on all fibres + no_spat: + # type=bool|default=False: Initialise with tensor, not spatially + non_linear: + # type=bool|default=False: Initialise with nonlinear fitting + cnlinear: + # type=bool|default=False: Initialise with constrained nonlinear fitting + rician: + # type=bool|default=False: use Rician noise modeling + f0_noard: + # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 + f0_ard: + # type=bool|default=False: Noise floor model: add to the model an unattenuated signal compartment f0 + force_dir: + # type=bool|default=True: use the actual directory name given (do not add + to make a new directory) + output_type: + # type=enum|default='NIFTI'|allowed['GIFTI','NIFTI','NIFTI_GZ','NIFTI_PAIR','NIFTI_PAIR_GZ']: FSL output type + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + 
# list[nipype2pydra.statements.imports.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/x_fibres_5_callables.py b/nipype-auto-conv/specs/interfaces/x_fibres_5_callables.py deleted file mode 100644 index 8b82c68..0000000 --- a/nipype-auto-conv/specs/interfaces/x_fibres_5_callables.py +++ /dev/null @@ -1,446 +0,0 @@ -"""Module to put any functions that are referred to in the "callables" section of XFibres5.yaml""" - -import attrs -import logging -import os -import os.path as op -from glob import glob -from pathlib import Path - - -def dyads_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["dyads"] - - -def fsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["fsamples"] - - -def mean_S0samples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_S0samples"] - - -def mean_dsamples_callable(output_dir, 
inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_dsamples"] - - -def mean_fsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_fsamples"] - - -def mean_tausamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["mean_tausamples"] - - -def phsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["phsamples"] - - -def thsamples_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["thsamples"] - - -IFLOGGER = logging.getLogger("nipype.interface") - - -# Original source at L885 of /interfaces/base/core.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - raise NotImplementedError - - -# Original source at L205 of /interfaces/fsl/base.py -def _gen_fname( - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None, - inputs=None, - stdout=None, - stderr=None, - output_dir=None, -): - """Generate a filename based on the given parameters. - - The filename will take the form: cwd/basename. - If change_ext is True, it will use the extensions specified in - inputs.output_type. - - Parameters - ---------- - basename : str - Filename to base the new filename on. - cwd : str - Path to prefix to the new filename. (default is output_dir) - suffix : str - Suffix to add to the `basename`. (defaults is '' ) - change_ext : bool - Flag to change the filename extension to the FSL output type. - (default True) - - Returns - ------- - fname : str - New filename based on given parameters. 
- - """ - - if basename == "": - msg = "Unable to generate filename for command %s. " % "xfibres" - msg += "basename is not set!" - raise ValueError(msg) - if cwd is None: - cwd = output_dir - if ext is None: - ext = Info.output_type_to_ext(inputs.output_type) - if change_ext: - if suffix: - suffix = "".join((suffix, ext)) - else: - suffix = ext - if suffix is None: - suffix = "" - fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) - return fname - - -# Original source at L298 of /interfaces/fsl/dti.py -def _list_outputs(out_dir=None, inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - n_fibres = inputs.n_fibres - if not out_dir: - if inputs.logdir is not attrs.NOTHING: - out_dir = os.path.abspath(inputs.logdir) - else: - out_dir = os.path.abspath("logdir") - - multi_out = ["dyads", "fsamples", "mean_fsamples", "phsamples", "thsamples"] - single_out = ["mean_dsamples", "mean_S0samples"] - - for k in single_out: - outputs[k] = _gen_fname( - k, - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - if (inputs.rician is not attrs.NOTHING) and inputs.rician: - outputs["mean_tausamples"] = _gen_fname( - "mean_tausamples", - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - - for k in multi_out: - outputs[k] = [] - - for i in range(1, n_fibres + 1): - outputs["fsamples"].append( - _gen_fname( - "f%dsamples" % i, - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["mean_fsamples"].append( - _gen_fname( - "mean_f%dsamples" % i, - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - for i in range(1, n_fibres + 1): - outputs["dyads"].append( - _gen_fname( - "dyads%d" % i, - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["phsamples"].append( - _gen_fname( - "ph%dsamples" % i, - 
cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - outputs["thsamples"].append( - _gen_fname( - "th%dsamples" % i, - cwd=out_dir, - inputs=inputs, - stdout=stdout, - stderr=stderr, - output_dir=output_dir, - ) - ) - - return outputs - - -# Original source at L108 of /utils/filemanip.py -def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): - """Manipulates path and name of input filename - - Parameters - ---------- - fname : string - A filename (may or may not include path) - prefix : string - Characters to prepend to the filename - suffix : string - Characters to append to the filename - newpath : string - Path to replace the path of the input fname - use_ext : boolean - If True (default), appends the extension of the original file - to the output name. - - Returns - ------- - Absolute path of the modified filename - - >>> from nipype.utils.filemanip import fname_presuffix - >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') - '/tmp/prefoopost.nii.gz' - - >>> from nipype.interfaces.base import attrs.NOTHING - >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \ - fname_presuffix(fname, 'pre', 'post') - True - - """ - pth, fname, ext = split_filename(fname) - if not use_ext: - ext = "" - - # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False - if newpath: - pth = op.abspath(newpath) - return op.join(pth, prefix + fname + suffix + ext) - - -# Original source at L58 of /utils/filemanip.py -def split_filename(fname): - """Split a filename into parts: path, base filename and extension. 
- - Parameters - ---------- - fname : str - file or path name - - Returns - ------- - pth : str - base path from fname - fname : str - filename from fname, without extension - ext : str - file extension from fname - - Examples - -------- - >>> from nipype.utils.filemanip import split_filename - >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth - '/home/data' - - >>> fname - 'subject' - - >>> ext - '.nii.gz' - - """ - - special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] - - pth = op.dirname(fname) - fname = op.basename(fname) - - ext = None - for special_ext in special_extensions: - ext_len = len(special_ext) - if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): - ext = fname[-ext_len:] - fname = fname[:-ext_len] - break - if not ext: - fname, ext = op.splitext(fname) - - return pth, fname, ext - - -# Original source at L1069 of /interfaces/base/core.py -class PackageInfo(object): - _version = None - version_cmd = None - version_file = None - - @classmethod - def version(klass): - if klass._version is None: - if klass.version_cmd is not None: - try: - clout = CommandLine( - command=klass.version_cmd, - resource_monitor=False, - terminal_output="allatonce", - ).run() - except IOError: - return None - - raw_info = clout.runtime.stdout - elif klass.version_file is not None: - try: - with open(klass.version_file, "rt") as fobj: - raw_info = fobj.read() - except OSError: - return None - else: - return None - - klass._version = klass.parse_version(raw_info) - - return klass._version - - @staticmethod - def parse_version(raw_info): - raise NotImplementedError - - -# Original source at L40 of /interfaces/fsl/base.py -class Info(PackageInfo): - """ - Handle FSL ``output_type`` and version information. 
- - output type refers to the type of file fsl defaults to writing - eg, NIFTI, NIFTI_GZ - - Examples - -------- - - >>> from nipype.interfaces.fsl import Info - >>> Info.version() # doctest: +SKIP - >>> Info.output_type() # doctest: +SKIP - - """ - - ftypes = { - "NIFTI": ".nii", - "NIFTI_PAIR": ".img", - "NIFTI_GZ": ".nii.gz", - "NIFTI_PAIR_GZ": ".img.gz", - } - - if os.getenv("FSLDIR"): - version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") - - @staticmethod - def parse_version(raw_info): - return raw_info.splitlines()[0] - - @classmethod - def output_type_to_ext(cls, output_type): - """Get the file extension for the given output type. - - Parameters - ---------- - output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} - String specifying the output type. - - Returns - ------- - extension : str - The file extension for the output type. - """ - - try: - return cls.ftypes[output_type] - except KeyError: - msg = "Invalid FSLOUTPUTTYPE: ", output_type - raise KeyError(msg) - - @classmethod - def output_type(cls): - """Get the global FSL output file type FSLOUTPUTTYPE. - - This returns the value of the environment variable - FSLOUTPUTTYPE. An exception is raised if it is not defined. - - Returns - ------- - fsl_ftype : string - Represents the current environment setting of FSLOUTPUTTYPE - """ - try: - return os.environ["FSLOUTPUTTYPE"] - except KeyError: - IFLOGGER.warning( - "FSLOUTPUTTYPE environment variable is not set. " - "Setting FSLOUTPUTTYPE=NIFTI" - ) - return "NIFTI" - - @staticmethod - def standard_image(img_name=None): - """Grab an image from the standard location. - - Returns a list of standard images if called without arguments. 
- - Could be made more fancy to allow for more relocatability""" - try: - fsldir = os.environ["FSLDIR"] - except KeyError: - raise Exception("FSL environment variables not set") - stdpath = os.path.join(fsldir, "data", "standard") - if img_name is None: - return [ - filename.replace(stdpath + "/", "") - for filename in glob(os.path.join(stdpath, "*nii*")) - ] - return os.path.join(stdpath, img_name) diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 7dcbc56..431e9ca 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -2,7 +2,33 @@ name: pydra.tasks.fsl # name of the nipype package to generate from (e.g. mriqc) nipype_name: nipype.interfaces.fsl +# Whether the package is an interface-only package (i.e. only contains interfaces and not workflows) +interface_only: null # The name of the global struct/dict that contains workflow inputs that are to be converted to inputs of the function along with the type of the struct, either "dict" or "class" config_params: null +# specifications for helper functions defined within the workflow package +functions: null +# specifications for helper class defined within the workflow package +classes: null # Mappings between nipype packages and their pydra equivalents. 
Regular expressions are supported import_translations: null +# Generic regular expression substitutions to be run over the code after it is processed +find_replace: null +# Generic regular expression substitutions to be run over the code after it is processed and the imports have been prepended +import_find_replace: null +# Names of modules (untranslated) that shouldn't be included in the converted package +omit_modules: null +# Addresses of classes (untranslated) that shouldn't be included in the converted package +omit_classes: null +# Addresses of functions (untranslated) that shouldn't be included in the converted package +omit_functions: null +# Addresses of constants (untranslated) that shouldn't be included in the converted package +omit_constants: +- nipype.logging +# The depth at which __init__ files should include imports from sub-modules by default +init_depth: 3 +# The depth at which __init__ files should include imports from sub-modules by default +auto_import_init_depth: 4 +# Packages that should be copied directly into the new package without modification +copy_packages: null +target_version: v6 diff --git a/pydra/tasks/fsl/__init__.py b/pydra/tasks/fsl/__init__.py index dc03b8a..57df2e1 100644 --- a/pydra/tasks/fsl/__init__.py +++ b/pydra/tasks/fsl/__init__.py @@ -18,21 +18,5 @@ "pydra-fsl has not been properly installed, please run " f"`pip install -e {str(pkg_path)}` to install a development version" ) -if "nipype" not in __version__: - try: - from .auto._version import nipype_version, nipype2pydra_version - except ImportError: - warn( - "Nipype interfaces haven't been automatically converted from their specs in " - f"`nipype-auto-conv`. 
Please run `{str(pkg_path / 'nipype-auto-conv' / 'generate')}` " - "to generated the converted Nipype interfaces in pydra.tasks.fsl.auto" - ) - else: - n_ver = nipype_version.replace(".", "_") - n2p_ver = nipype2pydra_version.replace(".", "_") - __version__ += ( - "_" if "+" in __version__ else "+" - ) + f"nipype{n_ver}_nipype2pydra{n2p_ver}" - __all__ = ["__version__"] diff --git a/pydra/tasks/fsl/latest.py b/pydra/tasks/fsl/latest.py index edb8f37..626451d 100644 --- a/pydra/tasks/fsl/latest.py +++ b/pydra/tasks/fsl/latest.py @@ -1,3 +1,3 @@ -PACKAGE_VERSION = "v6_0" +PACKAGE_VERSION = "v6" -from .v6_0 import * # noqa +from .v6 import * # noqa diff --git a/pydra/tasks/fsl/v6/__init__.py b/pydra/tasks/fsl/v6/__init__.py new file mode 100644 index 0000000..31718fc --- /dev/null +++ b/pydra/tasks/fsl/v6/__init__.py @@ -0,0 +1,128 @@ +from .aroma import ICA_AROMA +from .base import IFLOGGER, Info +from .dti import ( + BEDPOSTX5, + DTIFit, + DistanceMap, + FindTheBiggest, + MakeDyadicVectors, + ProbTrackX, + ProbTrackX2, + ProjThresh, + TractSkeleton, + VecReg, + XFibres5, +) +from .epi import ( + ApplyTOPUP, + EPIDeWarp, + Eddy, + EddyCorrect, + EddyQuad, + EpiReg, + PrepareFieldmap, + TOPUP, +) +from .fix import ( + AccuracyTester, + Classifier, + Cleaner, + FeatureExtractor, + Training, + TrainingSetCreator, +) +from .maths import ( + AR1Image, + ApplyMask, + BinaryMaths, + ChangeDataType, + DilateImage, + ErodeImage, + IsotropicSmooth, + MathsCommand, + MaxImage, + MaxnImage, + MeanImage, + MedianImage, + MinImage, + MultiImageMaths, + PercentileImage, + SpatialFilter, + StdImage, + TemporalFilter, + Threshold, + UnaryMaths, +) +from .model import ( + Cluster, + ContrastMgr, + DualRegression, + FEAT, + FEATModel, + FILMGLS, + FLAMEO, + GLM, + L2Model, + Level1Design, + MELODIC, + MultipleRegressDesign, + Randomise, + SMM, + SmoothEstimate, +) +from .nipype_ports import ( + fname_presuffix, + human_order_sorted, + load_json, + save_json, + simplify_list, + 
split_filename, +) +from .possum import B0Calc +from .preprocess import ( + ApplyWarp, + ApplyXFM, + BET, + FAST, + FIRST, + FLIRT, + FNIRT, + FUGUE, + MCFLIRT, + PRELUDE, + SUSAN, + SliceTimer, +) +from .utils import ( + AvScale, + Complex, + ConvertWarp, + ConvertXFM, + CopyGeom, + ExtractROI, + FilterRegressor, + ImageMaths, + ImageMeants, + ImageStats, + InvWarp, + Merge, + MotionOutliers, + Overlay, + PlotMotionParams, + PlotTimeSeries, + PowerSpectrum, + Reorient2Std, + RobustFOV, + SigLoss, + Slice, + Slicer, + Smooth, + Split, + SwapDimensions, + Text2Vest, + Vest2Text, + WarpPoints, + WarpPointsFromStd, + WarpPointsToStd, + WarpUtils, +) diff --git a/pydra/tasks/fsl/v6/_post_release.py b/pydra/tasks/fsl/v6/_post_release.py new file mode 100644 index 0000000..f3f2232 --- /dev/null +++ b/pydra/tasks/fsl/v6/_post_release.py @@ -0,0 +1,5 @@ +# Auto-generated by /Users/tclo7153/git/workflows/nipype2pydra/nipype2pydra/package.py, do not edit as it will be overwritten + +src_pkg_version = "1.10.0" +nipype2pydra_version = "0.5.1" +post_release = "1100051" diff --git a/pydra/tasks/fsl/v6/aroma/__init__.py b/pydra/tasks/fsl/v6/aroma/__init__.py new file mode 100644 index 0000000..70da950 --- /dev/null +++ b/pydra/tasks/fsl/v6/aroma/__init__.py @@ -0,0 +1 @@ +from .ica__aroma import ICA_AROMA diff --git a/pydra/tasks/fsl/v6/aroma/ica__aroma.py b/pydra/tasks/fsl/v6/aroma/ica__aroma.py new file mode 100644 index 0000000..c7ebbad --- /dev/null +++ b/pydra/tasks/fsl/v6/aroma/ica__aroma.py @@ -0,0 +1,153 @@ +import attrs +from fileformats.generic import Directory, File +from fileformats.medimage import Nifti1, NiftiGz +import logging +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "out_dir": + return argstr.format(**{name: os.path.abspath(value)}) + + return argstr.format(**inputs) + + +def out_dir_formatter(field, 
inputs): + return _format_arg("out_dir", field, inputs, argstr="-o {out_dir}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["out_dir"] = os.path.abspath(inputs["out_dir"]) + out_dir = outputs["out_dir"] + + if inputs["denoise_type"] in ("aggr", "both"): + outputs["aggr_denoised_file"] = os.path.join( + out_dir, "denoised_func_data_aggr.nii.gz" + ) + if inputs["denoise_type"] in ("nonaggr", "both"): + outputs["nonaggr_denoised_file"] = os.path.join( + out_dir, "denoised_func_data_nonaggr.nii.gz" + ) + return outputs + + +def aggr_denoised_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("aggr_denoised_file") + + +def nonaggr_denoised_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("nonaggr_denoised_file") + + +def out_dir_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_dir") + + +@shell.define( + xor=[ + ["feat_dir", "fnirt_warp_file"], + ["feat_dir", "fnirt_warp_file", "in_file", "mat_file", "motion_parameters"], + ["feat_dir", "in_file"], + ["feat_dir", "mask"], + ["feat_dir", "mat_file"], + ["feat_dir", "motion_parameters"], + ] +) +class ICA_AROMA(shell.Task["ICA_AROMA.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from fileformats.medimage import Nifti1, NiftiGz + >>> from pydra.tasks.fsl.v6.aroma.ica__aroma import ICA_AROMA + + >>> task = ICA_AROMA() + >>> task.feat_dir = Directory.mock() + >>> task.in_file = Nifti1.mock("functional.nii") + >>> task.out_dir = "ICA_testout" + >>> task.mask = NiftiGz.mock("mask.nii.gz") + >>> task.melodic_dir = 
Directory.mock() + >>> task.mat_file = File.mock() + >>> task.fnirt_warp_file = Nifti1.mock("warpfield.nii") + >>> task.motion_parameters = File.mock() + >>> task.cmdline + 'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout' + + + """ + + executable = "ICA_AROMA.py" + feat_dir: Directory | None = shell.arg( + help="If a feat directory exists and temporal filtering has not been run yet, ICA_AROMA can use the files in this directory.", + argstr="-feat {feat_dir}", + ) + in_file: Nifti1 | None = shell.arg( + help="volume to be denoised", argstr="-i {in_file}" + ) + out_dir: ty.Any | None = shell.arg( + help="output directory", formatter="out_dir_formatter", default="out" + ) + mask: NiftiGz | None = shell.arg(help="path/name volume mask", argstr="-m {mask}") + dim: int = shell.arg( + help="Dimensionality reduction when running MELODIC (default is automatic estimation)", + argstr="-dim {dim}", + ) + TR: float = shell.arg( + help="TR in seconds. If this is not specified the TR will be extracted from the header of the fMRI nifti file.", + argstr="-tr {TR:.3}", + ) + melodic_dir: Directory = shell.arg( + help="path to MELODIC directory if MELODIC has already been run", + argstr="-meldir {melodic_dir}", + ) + mat_file: File | None = shell.arg( + help="path/name of the mat-file describing the affine registration (e.g. FSL FLIRT) of the functional data to structural space (.mat file)", + argstr="-affmat {mat_file}", + ) + fnirt_warp_file: Nifti1 | None = shell.arg( + help="File name of the warp-file describing the non-linear registration (e.g. 
FSL FNIRT) of the structural data to MNI152 space (.nii.gz)", + argstr="-warp {fnirt_warp_file}", + ) + motion_parameters: File | None = shell.arg( + help="motion parameters file", argstr="-mc {motion_parameters}" + ) + denoise_type: ty.Any | None = shell.arg( + help="Type of denoising strategy:\n-no: only classification, no denoising\n-nonaggr (default): non-aggresssive denoising, i.e. partial component regression\n-aggr: aggressive denoising, i.e. full component regression\n-both: both aggressive and non-aggressive denoising (two outputs)", + argstr="-den {denoise_type}", + default="nonaggr", + ) + + class Outputs(shell.Outputs): + aggr_denoised_file: File | None = shell.out( + help="if generated: aggressively denoised volume", + callable=aggr_denoised_file_callable, + ) + nonaggr_denoised_file: File | None = shell.out( + help="if generated: non aggressively denoised volume", + callable=nonaggr_denoised_file_callable, + ) + out_dir: Directory | None = shell.out( + help="directory contains (in addition to the denoised files): melodic.ica + classified_motion_components + classification_overview + feature_scores + melodic_ic_mni)", + callable=out_dir_callable, + ) diff --git a/pydra/tasks/fsl/v6/aroma/tests/conftest.py b/pydra/tasks/fsl/v6/aroma/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/aroma/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + 
config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/aroma/tests/test_ica_aroma.py b/pydra/tasks/fsl/v6/aroma/tests/test_ica_aroma.py new file mode 100644 index 0000000..115b98c --- /dev/null +++ b/pydra/tasks/fsl/v6/aroma/tests/test_ica_aroma.py @@ -0,0 +1,38 @@ +from fileformats.generic import Directory, File +from fileformats.medimage import Nifti1, NiftiGz +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.aroma.ica__aroma import ICA_AROMA +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_ica_aroma_1(): + task = ICA_AROMA() + task.feat_dir = Directory.sample(seed=0) + task.in_file = Nifti1.sample(seed=1) + task.out_dir = "out" + task.mask = NiftiGz.sample(seed=3) + task.melodic_dir = Directory.sample(seed=6) + task.mat_file = File.sample(seed=7) + task.fnirt_warp_file = Nifti1.sample(seed=8) + task.motion_parameters = File.sample(seed=9) + task.denoise_type = "nonaggr" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_ica_aroma_2(): + task = ICA_AROMA() + task.in_file = Nifti1.sample(seed=1) + task.out_dir = "ICA_testout" + task.mask = NiftiGz.sample(seed=3) + task.fnirt_warp_file = Nifti1.sample(seed=8) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/base.py b/pydra/tasks/fsl/v6/base.py new file mode 100644 index 0000000..ea8ce34 --- /dev/null +++ b/pydra/tasks/fsl/v6/base.py @@ -0,0 +1,102 @@ +from glob import glob +import logging +import os + + +logger = logging.getLogger(__name__) + + +class Info: + """ + Handle FSL ``output_type`` and version information. 
+ + output type refers to the type of file fsl defaults to writing + eg, NIFTI, NIFTI_GZ + + Examples + -------- + + >>> from nipype.interfaces.fsl import Info + >>> Info.version() # doctest: +SKIP + >>> Info.output_type() # doctest: +SKIP + + """ + + ftypes = { + "NIFTI": ".nii", + "NIFTI_PAIR": ".img", + "NIFTI_GZ": ".nii.gz", + "NIFTI_PAIR_GZ": ".img.gz", + "GIFTI": ".func.gii", + } + + if os.getenv("FSLDIR"): + version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") + + @staticmethod + def parse_version(raw_info): + + return raw_info.splitlines()[0] + + @classmethod + def output_type_to_ext(cls, output_type): + """Get the file extension for the given output type. + + Parameters + ---------- + output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ', 'GIFTI'} + String specifying the output type. Note: limited GIFTI support. + + Returns + ------- + extension : str + The file extension for the output type. + """ + try: + return cls.ftypes[output_type] + except KeyError: + msg = "Invalid FSLOUTPUTTYPE: ", output_type + raise KeyError(msg) + + @classmethod + def output_type(cls): + """Get the global FSL output file type FSLOUTPUTTYPE. + + This returns the value of the environment variable + FSLOUTPUTTYPE. An exception is raised if it is not defined. + + Returns + ------- + fsl_ftype : string + Represents the current environment setting of FSLOUTPUTTYPE + """ + try: + return os.environ["FSLOUTPUTTYPE"] + except KeyError: + IFLOGGER.warning( + "FSLOUTPUTTYPE environment variable is not set. " + "Setting FSLOUTPUTTYPE=NIFTI" + ) + return "NIFTI" + + @staticmethod + def standard_image(img_name=None): + """Grab an image from the standard location. + + Returns a list of standard images if called without arguments. 
+ + Could be made more fancy to allow for more relocatability""" + try: + fsldir = os.environ["FSLDIR"] + except KeyError: + raise Exception("FSL environment variables not set") + stdpath = os.path.join(fsldir, "data", "standard") + if img_name is None: + return [ + filename.replace(stdpath + "/", "") + for filename in glob(os.path.join(stdpath, "*nii*")) + ] + return os.path.join(stdpath, img_name) + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/__init__.py b/pydra/tasks/fsl/v6/dti/__init__.py new file mode 100644 index 0000000..a2bccb7 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/__init__.py @@ -0,0 +1,11 @@ +from .bedpostx5 import BEDPOSTX5 +from .distance_map import DistanceMap +from .dti_fit import DTIFit +from .find_the_biggest import FindTheBiggest +from .make_dyadic_vectors import MakeDyadicVectors +from .prob_track_x import ProbTrackX +from .prob_track_x2 import ProbTrackX2 +from .proj_thresh import ProjThresh +from .tract_skeleton import TractSkeleton +from .vec_reg import VecReg +from .x_fibres_5 import XFibres5 diff --git a/pydra/tasks/fsl/v6/dti/bedpostx5.py b/pydra/tasks/fsl/v6/dti/bedpostx5.py new file mode 100644 index 0000000..60745d2 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/bedpostx5.py @@ -0,0 +1,410 @@ +import attrs +from fileformats.generic import Directory, File +from fileformats.medimage import Bval, Bvec, Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + n_fibres = inputs["n_fibres"] + + multi_out = [ + "merged_thsamples", + "merged_fsamples", + "merged_phsamples", + "mean_phsamples", + "mean_thsamples", + "mean_fsamples", + "dyads_dispersion", + "dyads", + ] + + single_out = ["mean_dsamples", "mean_S0samples"] + + for k in 
single_out: + outputs[k] = _gen_fname( + k, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + + for k in multi_out: + outputs[k] = [] + + for i in range(1, n_fibres + 1): + outputs["merged_thsamples"].append( + _gen_fname( + "merged_th%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["merged_fsamples"].append( + _gen_fname( + "merged_f%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["merged_phsamples"].append( + _gen_fname( + "merged_ph%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["mean_thsamples"].append( + _gen_fname( + "mean_th%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["mean_phsamples"].append( + _gen_fname( + "mean_ph%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["mean_fsamples"].append( + _gen_fname( + "mean_f%dsamples" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["dyads"].append( + _gen_fname( + "dyads%d" % i, 
+ cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["dyads_dispersion"].append( + _gen_fname( + "dyads%d_dispersion" % i, + cwd=parsed_inputs["_out_dir"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + return outputs + + +def mean_dsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_dsamples") + + +def mean_fsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_fsamples") + + +def mean_S0samples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_S0samples") + + +def mean_phsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_phsamples") + + +def mean_thsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_thsamples") + + +def merged_thsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("merged_thsamples") + + +def merged_phsamples_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("merged_phsamples") + + +def merged_fsamples_callable(output_dir, 
inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("merged_fsamples") + + +def dyads_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dyads") + + +def dyads_dispersion_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dyads_dispersion") + + +@shell.define( + xor=[ + ["all_ard", "f0_ard", "f0_noard"], + ["all_ard", "no_ard"], + ["cnlinear", "no_spat", "non_linear"], + ["f0_ard", "f0_noard"], + ] +) +class BEDPOSTX5(shell.Task["BEDPOSTX5.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from fileformats.medimage import Bval, Bvec, Nifti1 + >>> from pydra.tasks.fsl.v6.dti.bedpostx5 import BEDPOSTX5 + + >>> task = BEDPOSTX5() + >>> task.dwi = Nifti1.mock("diffusion.nii") + >>> task.mask = Nifti1.mock("mask.nii") + >>> task.bvecs = Bvec.mock("bvecs") + >>> task.bvals = Bval.mock("bvals") + >>> task.logdir = Directory.mock() + >>> task.n_fibres = 1 + >>> task.out_dir = Directory.mock() + >>> task.grad_dev = File.mock() + >>> task.cmdline + 'bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 -s 1 --updateproposalevery=40' + + + """ + + executable = "bedpostx" + dwi: Nifti1 = shell.arg(help="diffusion weighted image data file") + mask: Nifti1 = shell.arg(help="bet binary mask file") + bvecs: Bvec = shell.arg(help="b vectors file") + bvals: Bval = shell.arg(help="b values file") + logdir: Directory = shell.arg(help="", argstr="--logdir={logdir}") + n_fibres: ty.Any | None = shell.arg( + help="Maximum number of fibres to fit in each voxel", + argstr="-n {n_fibres}", + default=2, + ) + model: ty.Any = shell.arg( + help="use monoexponential (1, default, required for single-shell) or 
multiexponential (2, multi-shell) model", + argstr="-model {model}", + ) + fudge: int = shell.arg(help="ARD fudge factor", argstr="-w {fudge}") + n_jumps: int = shell.arg( + help="Num of jumps to be made by MCMC", argstr="-j {n_jumps}", default=5000 + ) + burn_in: ty.Any = shell.arg( + help="Total num of jumps at start of MCMC to be discarded", + argstr="-b {burn_in}", + default=0, + ) + sample_every: ty.Any = shell.arg( + help="Num of jumps for each sample (MCMC)", + argstr="-s {sample_every}", + default=1, + ) + out_dir: Directory | None = shell.arg( + help="output directory", argstr="{out_dir}", position=1, default="bedpostx" + ) + gradnonlin: bool = shell.arg( + help="consider gradient nonlinearities, default off", argstr="-g" + ) + grad_dev: File = shell.arg(help="grad_dev file, if gradnonlin, -g is True") + use_gpu: bool = shell.arg(help="Use the GPU version of bedpostx") + burn_in_no_ard: ty.Any = shell.arg( + help="num of burnin jumps before the ard is imposed", + argstr="--burnin_noard={burn_in_no_ard}", + default=0, + ) + update_proposal_every: ty.Any = shell.arg( + help="Num of jumps for each update to the proposal density std (MCMC)", + argstr="--updateproposalevery={update_proposal_every}", + default=40, + ) + seed: int = shell.arg( + help="seed for pseudo random number generator", argstr="--seed={seed}" + ) + no_ard: bool = shell.arg(help="Turn ARD off on all fibres", argstr="--noard") + all_ard: bool = shell.arg(help="Turn ARD on on all fibres", argstr="--allard") + no_spat: bool = shell.arg( + help="Initialise with tensor, not spatially", argstr="--nospat" + ) + non_linear: bool = shell.arg( + help="Initialise with nonlinear fitting", argstr="--nonlinear" + ) + cnlinear: bool = shell.arg( + help="Initialise with constrained nonlinear fitting", argstr="--cnonlinear" + ) + rician: bool = shell.arg(help="use Rician noise modeling", argstr="--rician") + f0_noard: bool = shell.arg( + help="Noise floor model: add to the model an unattenuated signal 
compartment f0", + argstr="--f0", + ) + f0_ard: bool = shell.arg( + help="Noise floor model: add to the model an unattenuated signal compartment f0", + argstr="--f0 --ardf0", + ) + force_dir: bool = shell.arg( + help="use the actual directory name given (do not add + to make a new directory)", + argstr="--forcedir", + default=True, + ) + + class Outputs(shell.Outputs): + mean_dsamples: File | None = shell.out( + help="Mean of distribution on diffusivity d", + callable=mean_dsamples_callable, + ) + mean_fsamples: list[File] | None = shell.out( + help="Mean of distribution on f anisotropy", callable=mean_fsamples_callable + ) + mean_S0samples: File | None = shell.out( + help="Mean of distribution on T2w baseline signal intensity S0", + callable=mean_S0samples_callable, + ) + mean_phsamples: list[File] | None = shell.out( + help="Mean of distribution on phi", callable=mean_phsamples_callable + ) + mean_thsamples: list[File] | None = shell.out( + help="Mean of distribution on theta", callable=mean_thsamples_callable + ) + merged_thsamples: list[File] | None = shell.out( + help="Samples from the distribution on theta", + callable=merged_thsamples_callable, + ) + merged_phsamples: list[File] | None = shell.out( + help="Samples from the distribution on phi", + callable=merged_phsamples_callable, + ) + merged_fsamples: list[File] | None = shell.out( + help="Samples from the distribution on anisotropic volume fraction", + callable=merged_fsamples_callable, + ) + dyads: list[File] | None = shell.out( + help="Mean of PDD distribution in vector form.", callable=dyads_callable + ) + dyads_dispersion: list[File] | None = shell.out( + help="Dispersion", callable=dyads_dispersion_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. 
+ If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "bedpostx" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/distance_map.py b/pydra/tasks/fsl/v6/dti/distance_map.py new file mode 100644 index 0000000..0c7acc2 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/distance_map.py @@ -0,0 +1,103 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "local_max_file": + if isinstance(value, bool): + return argstr.format(**{name: _list_outputs()["local_max_file"]}) + + return argstr.format(**inputs) + + +def local_max_file_formatter(field, inputs): + return _format_arg( + "local_max_file", field, inputs, argstr="--localmax={local_max_file}" + ) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, 
output_dir=None): + inputs = attrs.asdict(inputs) + self_dict = {} + + outputs = {} + _si = self_dict["inputs"] + outputs["distance_map"] = _si.distance_map + if _si.distance_map is attrs.NOTHING: + outputs["distance_map"] = fname_presuffix( + _si.in_file, suffix="_dstmap", use_ext=True, newpath=os.getcwd() + ) + outputs["distance_map"] = os.path.abspath(outputs["distance_map"]) + if _si.local_max_file is not attrs.NOTHING: + outputs["local_max_file"] = _si.local_max_file + if isinstance(_si.local_max_file, bool): + outputs["local_max_file"] = fname_presuffix( + _si.in_file, suffix="_lclmax", use_ext=True, newpath=os.getcwd() + ) + outputs["local_max_file"] = os.path.abspath(outputs["local_max_file"]) + return outputs + + +def local_max_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("local_max_file") + + +def _gen_filename(name, inputs): + if name == "distance_map": + return _list_outputs()["distance_map"] + return None + + +def distance_map_default(inputs): + return _gen_filename("distance_map", inputs=inputs) + + +@shell.define +class DistanceMap(shell.Task["DistanceMap.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.dti.distance_map import DistanceMap + + """ + + executable = "distancemap" + in_file: File = shell.arg( + help="image to calculate distance values for", argstr="--in={in_file}" + ) + mask_file: File = shell.arg( + help="binary mask to constrain calculations", argstr="--mask={mask_file}" + ) + invert_input: bool = shell.arg(help="invert input image", argstr="--invert") + local_max_file: ty.Any = shell.arg( + help="write an image of the local maxima", formatter="local_max_file_formatter" + ) + + class Outputs(shell.Outputs): + distance_map: Path = shell.outarg( + help="distance map to write", + argstr="--out={distance_map}", + 
path_template="distance_map", + ) + local_max_file: File | None = shell.out( + help="image of local maxima", callable=local_max_file_callable + ) diff --git a/pydra/tasks/fsl/v6/dti/dti_fit.py b/pydra/tasks/fsl/v6/dti/dti_fit.py new file mode 100644 index 0000000..c6a68ce --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/dti_fit.py @@ -0,0 +1,277 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Bval, Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + keys_to_ignore = {"outputtype", "environ", "args"} + + opt_output = {"tensor": inputs["save_tensor"], "sse": inputs["sse"]} + + for output, input_flag in opt_output.items(): + if (input_flag is not attrs.NOTHING) and input_flag: + + continue + keys_to_ignore.add(output) + + outputs = {} + for k in set(outputs["keys"]()) - keys_to_ignore: + outputs[k] = _gen_fname( + inputs["base_name"], + suffix="_" + k, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + return outputs + + +def V1_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("V1") + + +def V2_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("V2") + + +def V3_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("V3") + + +def L1_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, 
stdout=stdout, stderr=stderr + ) + return outputs.get("L1") + + +def L2_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("L2") + + +def L3_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("L3") + + +def MD_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("MD") + + +def FA_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("FA") + + +def MO_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("MO") + + +def S0_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("S0") + + +def tensor_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tensor") + + +def sse_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("sse") + + +@shell.define +class DTIFit(shell.Task["DTIFit.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Bval, Nifti1 + >>> from pydra.tasks.fsl.v6.dti.dti_fit import DTIFit + + >>> task = DTIFit() + >>> task.dwi = Nifti1.mock("diffusion.nii") + >>> task.mask = Nifti1.mock("mask.nii") + >>> task.bvecs = File.mock() + >>> task.bvals = Bval.mock("bvals") + >>> task.cni = 
File.mock() + >>> task.gradnonlin = File.mock() + >>> task.cmdline + 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' + + + """ + + executable = "dtifit" + dwi: Nifti1 = shell.arg( + help="diffusion weighted image data file", argstr="-k {dwi}", position=1 + ) + base_name: str = shell.arg( + help="base_name that all output files will start with", + argstr="-o {base_name}", + position=2, + default="dtifit_", + ) + mask: Nifti1 = shell.arg( + help="bet binary mask file", argstr="-m {mask}", position=3 + ) + bvecs: File = shell.arg(help="b vectors file", argstr="-r {bvecs}", position=4) + bvals: Bval = shell.arg(help="b values file", argstr="-b {bvals}", position=5) + min_z: int = shell.arg(help="min z", argstr="-z {min_z}") + max_z: int = shell.arg(help="max z", argstr="-Z {max_z}") + min_y: int = shell.arg(help="min y", argstr="-y {min_y}") + max_y: int = shell.arg(help="max y", argstr="-Y {max_y}") + min_x: int = shell.arg(help="min x", argstr="-x {min_x}") + max_x: int = shell.arg(help="max x", argstr="-X {max_x}") + save_tensor: bool = shell.arg( + help="save the elements of the tensor", argstr="--save_tensor" + ) + sse: bool = shell.arg(help="output sum of squared errors", argstr="--sse") + cni: File = shell.arg(help="input counfound regressors", argstr="--cni={cni}") + little_bit: bool = shell.arg( + help="only process small area of brain", argstr="--littlebit" + ) + gradnonlin: File = shell.arg( + help="gradient non linearities", argstr="--gradnonlin={gradnonlin}" + ) + + class Outputs(shell.Outputs): + V1: File | None = shell.out( + help="path/name of file with the 1st eigenvector", callable=V1_callable + ) + V2: File | None = shell.out( + help="path/name of file with the 2nd eigenvector", callable=V2_callable + ) + V3: File | None = shell.out( + help="path/name of file with the 3rd eigenvector", callable=V3_callable + ) + L1: File | None = shell.out( + help="path/name of file with the 1st eigenvalue", callable=L1_callable + ) + L2: File | None = 
shell.out( + help="path/name of file with the 2nd eigenvalue", callable=L2_callable + ) + L3: File | None = shell.out( + help="path/name of file with the 3rd eigenvalue", callable=L3_callable + ) + MD: File | None = shell.out( + help="path/name of file with the mean diffusivity", callable=MD_callable + ) + FA: File | None = shell.out( + help="path/name of file with the fractional anisotropy", + callable=FA_callable, + ) + MO: File | None = shell.out( + help="path/name of file with the mode of anisotropy", callable=MO_callable + ) + S0: File | None = shell.out( + help="path/name of file with the raw T2 signal with no diffusion weighting", + callable=S0_callable, + ) + tensor: File | None = shell.out( + help="path/name of file with the 4D tensor volume", callable=tensor_callable + ) + sse: File | None = shell.out( + help="path/name of file with the summed squared error", + callable=sse_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "dtifit" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/find_the_biggest.py b/pydra/tasks/fsl/v6/dti/find_the_biggest.py new file mode 100644 index 0000000..90b2710 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/find_the_biggest.py @@ -0,0 +1,120 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + out_file=inputs["out_file"], output_type=inputs["output_type"] + )[name] + else: + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class FindTheBiggest(shell.Task["FindTheBiggest.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.dti.find_the_biggest import FindTheBiggest + + >>> task = FindTheBiggest() + >>> task.in_files = ldir + >>> task.out_file = "biggestSegmentation" + >>> task.cmdline + 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' + + + """ + + executable = "find_the_biggest" + in_files: list[File] = shell.arg( + help="a list of input volumes or a singleMatrixFile", + argstr="{in_files}", + position=1, + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="file with the resulting segmentation", + argstr="{out_file}", + path_template='"biggestSegmentation"', + position=3, + ) + + 
+def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "find_the_biggest" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(out_file=None, output_type=None): + outputs = {} + outputs["out_file"] = out_file + if outputs["out_file"] is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + "biggestSegmentation", suffix="", output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/make_dyadic_vectors.py b/pydra/tasks/fsl/v6/dti/make_dyadic_vectors.py new file mode 100644 index 0000000..417b768 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/make_dyadic_vectors.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pydra.compose import shell + + +logger = 
logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["dyads"] = _gen_fname( + inputs["output"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["dispersion"] = _gen_fname( + inputs["output"], + suffix="_dispersion", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + + return outputs + + +def dyads_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dyads") + + +def dispersion_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dispersion") + + +@shell.define +class MakeDyadicVectors(shell.Task["MakeDyadicVectors.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.dti.make_dyadic_vectors import MakeDyadicVectors + + """ + + executable = "make_dyadic_vectors" + theta_vol: File = shell.arg(help="", argstr="{theta_vol}", position=1) + phi_vol: File = shell.arg(help="", argstr="{phi_vol}", position=2) + mask: File = shell.arg(help="", argstr="{mask}", position=3) + output: File = shell.arg(help="", argstr="{output}", position=4, default="dyads") + perc: float = shell.arg( + help="the {perc}% angle of the output cone of uncertainty (output will be in degrees)", + argstr="{perc}", + position=5, + ) + + class Outputs(shell.Outputs): + dyads: File | None = shell.out(callable=dyads_callable) + dispersion: File | None = shell.out(callable=dispersion_callable) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + 
output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "make_dyadic_vectors" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/prob_track_x.py b/pydra/tasks/fsl/v6/dti/prob_track_x.py new file mode 100644 index 0000000..0f8810f --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/prob_track_x.py @@ -0,0 +1,413 @@ +import attrs +from fileformats.datascience import TextMatrix +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "target_masks" and (value is not attrs.NOTHING): + fname = "targets.txt" + pass + elif name == "seed" and isinstance(value, list): 
+ fname = "seeds.txt" + pass + else: + pass + + return argstr.format(**inputs) + + +def target_masks_formatter(field, inputs): + return _format_arg( + "target_masks", field, inputs, argstr="--targetmasks={target_masks}" + ) + + +def seed_formatter(field, inputs): + return _format_arg("seed", field, inputs, argstr="--seed={seed}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["out_dir"] is attrs.NOTHING: + out_dir = _gen_filename( + "out_dir", + seed=inputs["seed"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + out_dir = inputs["out_dir"] + + outputs["log"] = os.path.abspath(os.path.join(out_dir, "probtrackx.log")) + + if inputs["opd"] is True is not attrs.NOTHING: + if isinstance(inputs["seed"], list) and isinstance(inputs["seed"][0], list): + outputs["fdt_paths"] = [] + for seed in inputs["seed"]: + outputs["fdt_paths"].append( + os.path.abspath( + _gen_fname( + ("fdt_paths_%s" % ("_".join([str(s) for s in seed]))), + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + ) + else: + outputs["fdt_paths"] = os.path.abspath( + _gen_fname( + "fdt_paths", + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + + if inputs["target_masks"] is not attrs.NOTHING: + outputs["targets"] = [] + for target in inputs["target_masks"]: + outputs["targets"].append( + os.path.abspath( + _gen_fname( + "seeds_to_" + os.path.split(target)[1], + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + ) + if 
(inputs["verbose"] is not attrs.NOTHING) and inputs["verbose"] == 2: + outputs["particle_files"] = [ + os.path.abspath(os.path.join(out_dir, "particle%d" % i)) + for i in range(inputs["n_samples"]) + ] + return outputs + + +def log_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("log") + + +def fdt_paths_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fdt_paths") + + +def way_total_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("way_total") + + +def targets_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("targets") + + +def particle_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("particle_files") + + +def _gen_filename(name, inputs): + if name == "out_dir": + return os.getcwd() + elif name == "mode": + if isinstance(inputs["seed"], list) and isinstance(inputs["seed"][0], list): + return "simple" + else: + return "seedmask" + + +def mode_default(inputs): + return _gen_filename("mode", inputs=inputs) + + +def out_dir_default(inputs): + return _gen_filename("out_dir", inputs=inputs) + + +@shell.define +class ProbTrackX(shell.Task["ProbTrackX.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.datascience import TextMatrix + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pydra.tasks.fsl.v6.dti.prob_track_x import ProbTrackX + + >>> task = ProbTrackX() + >>> task.mode = "seedmask" + >>> task.mask2 = File.mock() + 
>>> task.mesh = File.mock() + >>> task.thsamples = [Nifti1.mock("m"), Nifti1.mock("e"), Nifti1.mock("r"), Nifti1.mock("g"), Nifti1.mock("e"), Nifti1.mock("d"), Nifti1.mock("_"), Nifti1.mock("t"), Nifti1.mock("h"), Nifti1.mock("s"), Nifti1.mock("a"), Nifti1.mock("m"), Nifti1.mock("p"), Nifti1.mock("l"), Nifti1.mock("e"), Nifti1.mock("s"), Nifti1.mock("."), Nifti1.mock("n"), Nifti1.mock("i"), Nifti1.mock("i")] + >>> task.phsamples = [Nifti1.mock("m"), Nifti1.mock("e"), Nifti1.mock("r"), Nifti1.mock("g"), Nifti1.mock("e"), Nifti1.mock("d"), Nifti1.mock("_"), Nifti1.mock("p"), Nifti1.mock("h"), Nifti1.mock("s"), Nifti1.mock("a"), Nifti1.mock("m"), Nifti1.mock("p"), Nifti1.mock("l"), Nifti1.mock("e"), Nifti1.mock("s"), Nifti1.mock("."), Nifti1.mock("n"), Nifti1.mock("i"), Nifti1.mock("i")] + >>> task.fsamples = [Nifti1.mock("m"), Nifti1.mock("e"), Nifti1.mock("r"), Nifti1.mock("g"), Nifti1.mock("e"), Nifti1.mock("d"), Nifti1.mock("_"), Nifti1.mock("f"), Nifti1.mock("s"), Nifti1.mock("a"), Nifti1.mock("m"), Nifti1.mock("p"), Nifti1.mock("l"), Nifti1.mock("e"), Nifti1.mock("s"), Nifti1.mock("."), Nifti1.mock("n"), Nifti1.mock("i"), Nifti1.mock("i")] + >>> task.samples_base_name = "merged" + >>> task.mask = Nifti1.mock("mask.nii") + >>> task.seed = "MASK_average_thal_right.nii" + >>> task.target_masks = [Nifti1.mock("targets_MASK1.nii"), Nifti1.mock("targets_MASK2.nii")] + >>> task.waypoints = File.mock() + >>> task.seed_ref = File.mock() + >>> task.out_dir = "." + >>> task.force_dir = True + >>> task.opd = True + >>> task.os2t = True + >>> task.avoid_mp = File.mock() + >>> task.stop_mask = File.mock() + >>> task.xfm = TextMatrix.mock("trans.mat") + >>> task.inv_xfm = File.mock() + >>> task.n_samples = 3 + >>> task.n_steps = 10 + >>> task.cmdline + 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. 
--samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' + + + """ + + executable = "probtrackx" + mode: ty.Any = shell.arg( + help="options: simple (single seed voxel), seedmask (mask of seed voxels), twomask_symm (two bet binary masks)", + argstr="--mode={mode}", + ) + mask2: File = shell.arg( + help="second bet binary mask (in diffusion space) in twomask_symm mode", + argstr="--mask2={mask2}", + ) + mesh: File = shell.arg( + help="Freesurfer-type surface descriptor (in ascii format)", + argstr="--mesh={mesh}", + ) + thsamples: list[Nifti1] = shell.arg(help="") + phsamples: list[Nifti1] = shell.arg(help="") + fsamples: list[Nifti1] = shell.arg(help="") + samples_base_name: str = shell.arg( + help="the rootname/base_name for samples files", + argstr="--samples={samples_base_name}", + default="merged", + ) + mask: Nifti1 = shell.arg( + help="bet binary mask file in diffusion space", argstr="-m {mask}" + ) + seed: ty.Any = shell.arg( + help="seed volume(s), or voxel(s) or freesurfer label file", + formatter="seed_formatter", + ) + target_masks: list[Nifti1] = shell.arg( + help="list of target masks - required for seeds_to_targets classification", + formatter="target_masks_formatter", + ) + waypoints: File = shell.arg( + help="waypoint mask or ascii list of waypoint masks - only keep paths going through ALL the masks", + argstr="--waypoints={waypoints}", + ) + network: bool = shell.arg( + help="activate network mode - only keep paths going through at least one seed mask (required if multiple seed masks)", + argstr="--network", + ) + seed_ref: File = shell.arg( + help="reference vol to define seed space in simple mode - diffusion space assumed if absent", + argstr="--seedref={seed_ref}", + ) + out_dir: ty.Any = shell.arg( + help="directory to put the final volumes in", argstr="--dir={out_dir}" + ) + force_dir: bool = shell.arg( + help="use the actual directory name given - i.e. 
do not add + to make a new directory", + argstr="--forcedir", + default=True, + ) + opd: bool = shell.arg( + help="outputs path distributions", argstr="--opd", default=True + ) + correct_path_distribution: bool = shell.arg( + help="correct path distribution for the length of the pathways", argstr="--pd" + ) + os2t: bool = shell.arg(help="Outputs seeds to targets", argstr="--os2t") + avoid_mp: File = shell.arg( + help="reject pathways passing through locations given by this mask", + argstr="--avoid={avoid_mp}", + ) + stop_mask: File = shell.arg( + help="stop tracking at locations given by this mask file", + argstr="--stop={stop_mask}", + ) + xfm: TextMatrix = shell.arg( + help="transformation matrix taking seed space to DTI space (either FLIRT matrix or FNIRT warp_field) - default is identity", + argstr="--xfm={xfm}", + ) + inv_xfm: File = shell.arg( + help="transformation matrix taking DTI space to seed space (compulsory when using a warp_field for seeds_to_dti)", + argstr="--invxfm={inv_xfm}", + ) + n_samples: int = shell.arg( + help="number of samples - default=5000", + argstr="--nsamples={n_samples}", + default=5000, + ) + n_steps: int = shell.arg( + help="number of steps per sample - default=2000", argstr="--nsteps={n_steps}" + ) + dist_thresh: float = shell.arg( + help="discards samples shorter than this threshold (in mm - default=0)", + argstr="--distthresh={dist_thresh:.3}", + ) + c_thresh: float = shell.arg( + help="curvature threshold - default=0.2", argstr="--cthr={c_thresh:.3}" + ) + sample_random_points: float = shell.arg( + help="sample random points within seed voxels", + argstr="--sampvox={sample_random_points:.3}", + ) + step_length: float = shell.arg( + help="step_length in mm - default=0.5", argstr="--steplength={step_length:.3}" + ) + loop_check: bool = shell.arg( + help="perform loop_checks on paths - slower, but allows lower curvature threshold", + argstr="--loopcheck", + ) + use_anisotropy: bool = shell.arg( + help="use anisotropy to constrain 
tracking", argstr="--usef" + ) + rand_fib: ty.Any = shell.arg( + help="options: 0 - default, 1 - to randomly sample initial fibres (with f > fibthresh), 2 - to sample in proportion fibres (with f>fibthresh) to f, 3 - to sample ALL populations at random (even if f. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "probtrackx" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/prob_track_x2.py b/pydra/tasks/fsl/v6/dti/prob_track_x2.py new file mode 100644 index 0000000..3f82759 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/prob_track_x2.py @@ -0,0 +1,538 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import NiftiGz +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "target_masks" and (value is not attrs.NOTHING): + fname = "targets.txt" + pass + elif name == "seed" and 
isinstance(value, list): + fname = "seeds.txt" + pass + else: + pass + + return argstr.format(**inputs) + + +def target_masks_formatter(field, inputs): + return _format_arg( + "target_masks", field, inputs, argstr="--targetmasks={target_masks}" + ) + + +def seed_formatter(field, inputs): + return _format_arg("seed", field, inputs, argstr="--seed={seed}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["out_dir"] is attrs.NOTHING: + out_dir = _gen_filename( + "out_dir", + seed=inputs["seed"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + out_dir = inputs["out_dir"] + + outputs["log"] = os.path.abspath(os.path.join(out_dir, "probtrackx.log")) + + if inputs["opd"] is True is not attrs.NOTHING: + if isinstance(inputs["seed"], list) and isinstance(inputs["seed"][0], list): + outputs["fdt_paths"] = [] + for seed in inputs["seed"]: + outputs["fdt_paths"].append( + os.path.abspath( + _gen_fname( + ("fdt_paths_%s" % ("_".join([str(s) for s in seed]))), + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + ) + else: + outputs["fdt_paths"] = os.path.abspath( + _gen_fname( + "fdt_paths", + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + + if inputs["target_masks"] is not attrs.NOTHING: + outputs["targets"] = [] + for target in inputs["target_masks"]: + outputs["targets"].append( + os.path.abspath( + _gen_fname( + "seeds_to_" + os.path.split(target)[1], + cwd=out_dir, + suffix="", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], 
+ ) + ) + ) + if (inputs["verbose"] is not attrs.NOTHING) and inputs["verbose"] == 2: + outputs["particle_files"] = [ + os.path.abspath(os.path.join(out_dir, "particle%d" % i)) + for i in range(inputs["n_samples"]) + ] + + if inputs["out_dir"] is attrs.NOTHING: + out_dir = os.getcwd() + else: + out_dir = inputs["out_dir"] + + outputs["way_total"] = os.path.abspath(os.path.join(out_dir, "waytotal")) + + if inputs["omatrix1"] is not attrs.NOTHING: + outputs["network_matrix"] = os.path.abspath( + os.path.join(out_dir, "matrix_seeds_to_all_targets") + ) + outputs["matrix1_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix1.dot") + ) + + if inputs["omatrix2"] is not attrs.NOTHING: + outputs["lookup_tractspace"] = os.path.abspath( + os.path.join(out_dir, "lookup_tractspace_fdt_matrix2.nii.gz") + ) + outputs["matrix2_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix2.dot") + ) + + if inputs["omatrix3"] is not attrs.NOTHING: + outputs["matrix3_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix3.dot") + ) + return outputs + + +def network_matrix_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("network_matrix") + + +def matrix1_dot_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("matrix1_dot") + + +def lookup_tractspace_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("lookup_tractspace") + + +def matrix2_dot_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("matrix2_dot") + + +def matrix3_dot_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("matrix3_dot") + + +def log_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("log") + + +def fdt_paths_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fdt_paths") + + +def way_total_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("way_total") + + +def targets_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("targets") + + +def particle_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("particle_files") + + +def _gen_filename(name, inputs): + if name == "out_dir": + return os.getcwd() + elif name == "mode": + if isinstance(inputs["seed"], list) and isinstance(inputs["seed"][0], list): + return "simple" + else: + return "seedmask" + + +def out_dir_default(inputs): + return _gen_filename("out_dir", inputs=inputs) + + +@shell.define +class ProbTrackX2(shell.Task["ProbTrackX2.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import NiftiGz + >>> from pydra.tasks.fsl.v6.dti.prob_track_x2 import ProbTrackX2 + + >>> task = ProbTrackX2() + >>> task.fopd = File.mock() + >>> task.target2 = File.mock() + >>> task.target3 = File.mock() + >>> task.lrtarget3 = File.mock() + >>> task.colmask4 = File.mock() + >>> task.target4 = File.mock() + >>> task.fsamples = [NiftiGz.mock("m"), NiftiGz.mock("e"), NiftiGz.mock("r"), NiftiGz.mock("g"), 
NiftiGz.mock("e"), NiftiGz.mock("d"), NiftiGz.mock("_"), NiftiGz.mock("f"), NiftiGz.mock("1"), NiftiGz.mock("s"), NiftiGz.mock("a"), NiftiGz.mock("m"), NiftiGz.mock("p"), NiftiGz.mock("l"), NiftiGz.mock("e"), NiftiGz.mock("s"), NiftiGz.mock("."), NiftiGz.mock("n"), NiftiGz.mock("i"), NiftiGz.mock("i"), NiftiGz.mock("."), NiftiGz.mock("g"), NiftiGz.mock("z")] + >>> task.mask = NiftiGz.mock("nodif_brain_mask.nii.gz") + >>> task.seed = "seed_source.nii.gz" + >>> task.waypoints = File.mock() + >>> task.seed_ref = File.mock() + >>> task.avoid_mp = File.mock() + >>> task.stop_mask = File.mock() + >>> task.xfm = File.mock() + >>> task.inv_xfm = File.mock() + >>> task.n_samples = 3 + >>> task.cmdline + 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. --samples=merged --seed=seed_source.nii.gz' + + + """ + + executable = "probtrackx2" + simple: bool = shell.arg( + help="rack from a list of voxels (seed must be a ASCII list of coordinates)", + argstr="--simple", + ) + fopd: File = shell.arg( + help="Other mask for binning tract distribution", argstr="--fopd={fopd}" + ) + waycond: ty.Any = shell.arg( + help='Waypoint condition. Either "AND" (default) or "OR"', + argstr="--waycond={waycond}", + ) + wayorder: bool = shell.arg( + help="Reject streamlines that do not hit waypoints in given order. 
Only valid if waycond=AND", + argstr="--wayorder", + ) + onewaycondition: bool = shell.arg( + help="Apply waypoint conditions to each half tract separately", + argstr="--onewaycondition", + ) + omatrix1: bool = shell.arg( + help="Output matrix1 - SeedToSeed Connectivity", argstr="--omatrix1" + ) + distthresh1: float = shell.arg( + help="Discards samples (in matrix1) shorter than this threshold (in mm - default=0)", + argstr="--distthresh1={distthresh1:.3}", + ) + omatrix2: bool = shell.arg( + help="Output matrix2 - SeedToLowResMask", + argstr="--omatrix2", + requires=["target2"], + ) + target2: File = shell.arg( + help="Low resolution binary brain mask for storing connectivity distribution in matrix2 mode", + argstr="--target2={target2}", + ) + omatrix3: bool = shell.arg( + help="Output matrix3 (NxN connectivity matrix)", + argstr="--omatrix3", + requires=["target3", "lrtarget3"], + ) + target3: File = shell.arg( + help="Mask used for NxN connectivity matrix (or Nxn if lrtarget3 is set)", + argstr="--target3={target3}", + ) + lrtarget3: File = shell.arg( + help="Column-space mask used for Nxn connectivity matrix", + argstr="--lrtarget3={lrtarget3}", + ) + distthresh3: float = shell.arg( + help="Discards samples (in matrix3) shorter than this threshold (in mm - default=0)", + argstr="--distthresh3={distthresh3:.3}", + ) + omatrix4: bool = shell.arg( + help="Output matrix4 - DtiMaskToSeed (special Oxford Sparse Format)", + argstr="--omatrix4", + ) + colmask4: File = shell.arg( + help="Mask for columns of matrix4 (default=seed mask)", + argstr="--colmask4={colmask4}", + ) + target4: File = shell.arg( + help="Brain mask in DTI space", argstr="--target4={target4}" + ) + meshspace: ty.Any = shell.arg( + help='Mesh reference space - either "caret" (default) or "freesurfer" or "first" or "vox"', + argstr="--meshspace={meshspace}", + ) + thsamples: list[File] = shell.arg(help="") + phsamples: list[File] = shell.arg(help="") + fsamples: list[NiftiGz] = shell.arg(help="") + 
samples_base_name: str = shell.arg( + help="the rootname/base_name for samples files", + argstr="--samples={samples_base_name}", + default="merged", + ) + mask: NiftiGz = shell.arg( + help="bet binary mask file in diffusion space", argstr="-m {mask}" + ) + seed: ty.Any = shell.arg( + help="seed volume(s), or voxel(s) or freesurfer label file", + formatter="seed_formatter", + ) + target_masks: list[File] = shell.arg( + help="list of target masks - required for seeds_to_targets classification", + formatter="target_masks_formatter", + ) + waypoints: File = shell.arg( + help="waypoint mask or ascii list of waypoint masks - only keep paths going through ALL the masks", + argstr="--waypoints={waypoints}", + ) + network: bool = shell.arg( + help="activate network mode - only keep paths going through at least one seed mask (required if multiple seed masks)", + argstr="--network", + ) + seed_ref: File = shell.arg( + help="reference vol to define seed space in simple mode - diffusion space assumed if absent", + argstr="--seedref={seed_ref}", + ) + out_dir: ty.Any = shell.arg( + help="directory to put the final volumes in", argstr="--dir={out_dir}" + ) + force_dir: bool = shell.arg( + help="use the actual directory name given - i.e. 
do not add + to make a new directory", + argstr="--forcedir", + default=True, + ) + opd: bool = shell.arg( + help="outputs path distributions", argstr="--opd", default=True + ) + correct_path_distribution: bool = shell.arg( + help="correct path distribution for the length of the pathways", argstr="--pd" + ) + os2t: bool = shell.arg(help="Outputs seeds to targets", argstr="--os2t") + avoid_mp: File = shell.arg( + help="reject pathways passing through locations given by this mask", + argstr="--avoid={avoid_mp}", + ) + stop_mask: File = shell.arg( + help="stop tracking at locations given by this mask file", + argstr="--stop={stop_mask}", + ) + xfm: File = shell.arg( + help="transformation matrix taking seed space to DTI space (either FLIRT matrix or FNIRT warp_field) - default is identity", + argstr="--xfm={xfm}", + ) + inv_xfm: File = shell.arg( + help="transformation matrix taking DTI space to seed space (compulsory when using a warp_field for seeds_to_dti)", + argstr="--invxfm={inv_xfm}", + ) + n_samples: int = shell.arg( + help="number of samples - default=5000", + argstr="--nsamples={n_samples}", + default=5000, + ) + n_steps: int = shell.arg( + help="number of steps per sample - default=2000", argstr="--nsteps={n_steps}" + ) + dist_thresh: float = shell.arg( + help="discards samples shorter than this threshold (in mm - default=0)", + argstr="--distthresh={dist_thresh:.3}", + ) + c_thresh: float = shell.arg( + help="curvature threshold - default=0.2", argstr="--cthr={c_thresh:.3}" + ) + sample_random_points: float = shell.arg( + help="sample random points within seed voxels", + argstr="--sampvox={sample_random_points:.3}", + ) + step_length: float = shell.arg( + help="step_length in mm - default=0.5", argstr="--steplength={step_length:.3}" + ) + loop_check: bool = shell.arg( + help="perform loop_checks on paths - slower, but allows lower curvature threshold", + argstr="--loopcheck", + ) + use_anisotropy: bool = shell.arg( + help="use anisotropy to constrain 
tracking", argstr="--usef" + ) + rand_fib: ty.Any = shell.arg( + help="options: 0 - default, 1 - to randomly sample initial fibres (with f > fibthresh), 2 - to sample in proportion fibres (with f>fibthresh) to f, 3 - to sample ALL populations at random (even if f. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "probtrackx2" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/proj_thresh.py b/pydra/tasks/fsl/v6/dti/proj_thresh.py new file mode 100644 index 0000000..6fd7bf1 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/proj_thresh.py @@ -0,0 +1,132 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["out_files"] = [] + for name in inputs["in_files"]: + cwd, base_name = os.path.split(name) + outputs["out_files"].append( + _gen_fname( + 
base_name, + cwd=cwd, + suffix=f"_proj_seg_thr_{inputs['threshold']}", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + return outputs + + +def out_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_files") + + +@shell.define +class ProjThresh(shell.Task["ProjThresh.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.dti.proj_thresh import ProjThresh + + >>> task = ProjThresh() + >>> task.in_files = ldir + >>> task.threshold = 3 + >>> task.cmdline + 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' + + + """ + + executable = "proj_thresh" + in_files: list[File] = shell.arg( + help="a list of input volumes", argstr="{in_files}", position=1 + ) + threshold: int = shell.arg( + help="threshold indicating minimum number of seed voxels entering this mask region", + argstr="{threshold}", + position=2, + ) + + class Outputs(shell.Outputs): + out_files: list[File] | None = shell.out( + help="path/name of output volume after thresholding", + callable=out_files_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "proj_thresh" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/dti/tests/conftest.py b/pydra/tasks/fsl/v6/dti/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/dti/tests/test_bedpostx5.py b/pydra/tasks/fsl/v6/dti/tests/test_bedpostx5.py new file mode 100644 index 0000000..a3e4648 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_bedpostx5.py @@ -0,0 +1,44 @@ +from fileformats.generic import Directory, File +from fileformats.medimage import Bval, Bvec, Nifti1 +import logging +from nipype2pydra.testing import 
PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.bedpostx5 import BEDPOSTX5 +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_bedpostx5_1(): + task = BEDPOSTX5() + task.dwi = Nifti1.sample(seed=0) + task.mask = Nifti1.sample(seed=1) + task.bvecs = Bvec.sample(seed=2) + task.bvals = Bval.sample(seed=3) + task.logdir = Directory.sample(seed=4) + task.n_fibres = 2 + task.n_jumps = 5000 + task.burn_in = 0 + task.sample_every = 1 + task.out_dir = Directory.sample(seed=11) + task.grad_dev = File.sample(seed=13) + task.burn_in_no_ard = 0 + task.update_proposal_every = 40 + task.force_dir = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_bedpostx5_2(): + task = BEDPOSTX5() + task.dwi = Nifti1.sample(seed=0) + task.mask = Nifti1.sample(seed=1) + task.bvecs = Bvec.sample(seed=2) + task.bvals = Bval.sample(seed=3) + task.n_fibres = 1 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_distancemap.py b/pydra/tasks/fsl/v6/dti/tests/test_distancemap.py new file mode 100644 index 0000000..336ba68 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_distancemap.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.distance_map import DistanceMap +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_distancemap_1(): + task = DistanceMap() + task.in_file = File.sample(seed=0) + task.mask_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_dtifit.py b/pydra/tasks/fsl/v6/dti/tests/test_dtifit.py new file mode 100644 index 0000000..e894a73 --- /dev/null +++ 
b/pydra/tasks/fsl/v6/dti/tests/test_dtifit.py @@ -0,0 +1,35 @@ +from fileformats.generic import File +from fileformats.medimage import Bval, Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.dti_fit import DTIFit +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_dtifit_1(): + task = DTIFit() + task.dwi = Nifti1.sample(seed=0) + task.base_name = "dtifit_" + task.mask = Nifti1.sample(seed=2) + task.bvecs = File.sample(seed=3) + task.bvals = Bval.sample(seed=4) + task.cni = File.sample(seed=13) + task.gradnonlin = File.sample(seed=15) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_dtifit_2(): + task = DTIFit() + task.dwi = Nifti1.sample(seed=0) + task.mask = Nifti1.sample(seed=2) + task.bvals = Bval.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_findthebiggest.py b/pydra/tasks/fsl/v6/dti/tests/test_findthebiggest.py new file mode 100644 index 0000000..17a4da2 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_findthebiggest.py @@ -0,0 +1,27 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.find_the_biggest import FindTheBiggest +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_findthebiggest_1(): + task = FindTheBiggest() + task.in_files = [File.sample(seed=0)] + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_findthebiggest_2(): + task = FindTheBiggest() + task.in_files = [File.sample(seed=0)] + task.out_file = "biggestSegmentation" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + 
print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_makedyadicvectors.py b/pydra/tasks/fsl/v6/dti/tests/test_makedyadicvectors.py new file mode 100644 index 0000000..d8c44c6 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_makedyadicvectors.py @@ -0,0 +1,20 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.make_dyadic_vectors import MakeDyadicVectors +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_makedyadicvectors_1(): + task = MakeDyadicVectors() + task.theta_vol = File.sample(seed=0) + task.phi_vol = File.sample(seed=1) + task.mask = File.sample(seed=2) + task.output = File.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_probtrackx.py b/pydra/tasks/fsl/v6/dti/tests/test_probtrackx.py new file mode 100644 index 0000000..cc8f48c --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_probtrackx.py @@ -0,0 +1,58 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.prob_track_x import ProbTrackX +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_probtrackx_1(): + task = ProbTrackX() + task.mask2 = File.sample(seed=1) + task.mesh = File.sample(seed=2) + task.thsamples = [Nifti1.sample(seed=3)] + task.phsamples = [Nifti1.sample(seed=4)] + task.fsamples = [Nifti1.sample(seed=5)] + task.samples_base_name = "merged" + task.mask = Nifti1.sample(seed=7) + task.target_masks = [Nifti1.sample(seed=9)] + task.waypoints = File.sample(seed=10) + task.seed_ref = File.sample(seed=12) + task.force_dir = True + task.opd = True + task.avoid_mp = File.sample(seed=18) + task.stop_mask = 
File.sample(seed=19) + task.xfm = TextMatrix.sample(seed=20) + task.inv_xfm = File.sample(seed=21) + task.n_samples = 5000 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_probtrackx_2(): + task = ProbTrackX() + task.mode = "seedmask" + task.thsamples = [Nifti1.sample(seed=3)] + task.phsamples = [Nifti1.sample(seed=4)] + task.fsamples = [Nifti1.sample(seed=5)] + task.samples_base_name = "merged" + task.mask = Nifti1.sample(seed=7) + task.seed = "MASK_average_thal_right.nii" + task.target_masks = [Nifti1.sample(seed=9)] + task.out_dir = "." + task.force_dir = True + task.opd = True + task.os2t = True + task.xfm = TextMatrix.sample(seed=20) + task.n_samples = 3 + task.n_steps = 10 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_probtrackx2.py b/pydra/tasks/fsl/v6/dti/tests/test_probtrackx2.py new file mode 100644 index 0000000..f051f3b --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_probtrackx2.py @@ -0,0 +1,50 @@ +from fileformats.generic import File +from fileformats.medimage import NiftiGz +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.prob_track_x2 import ProbTrackX2 +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_probtrackx2_1(): + task = ProbTrackX2() + task.fopd = File.sample(seed=1) + task.target2 = File.sample(seed=8) + task.target3 = File.sample(seed=10) + task.lrtarget3 = File.sample(seed=11) + task.colmask4 = File.sample(seed=14) + task.target4 = File.sample(seed=15) + task.thsamples = [File.sample(seed=17)] + task.phsamples = [File.sample(seed=18)] + task.fsamples = [NiftiGz.sample(seed=19)] + task.samples_base_name = "merged" + task.mask = NiftiGz.sample(seed=21) + task.target_masks = [File.sample(seed=23)] + task.waypoints = File.sample(seed=24) + 
task.seed_ref = File.sample(seed=26) + task.force_dir = True + task.opd = True + task.avoid_mp = File.sample(seed=32) + task.stop_mask = File.sample(seed=33) + task.xfm = File.sample(seed=34) + task.inv_xfm = File.sample(seed=35) + task.n_samples = 5000 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_probtrackx2_2(): + task = ProbTrackX2() + task.fsamples = [NiftiGz.sample(seed=19)] + task.mask = NiftiGz.sample(seed=21) + task.seed = "seed_source.nii.gz" + task.n_samples = 3 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_projthresh.py b/pydra/tasks/fsl/v6/dti/tests/test_projthresh.py new file mode 100644 index 0000000..efef04f --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_projthresh.py @@ -0,0 +1,27 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.proj_thresh import ProjThresh +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_projthresh_1(): + task = ProjThresh() + task.in_files = [File.sample(seed=0)] + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_projthresh_2(): + task = ProjThresh() + task.in_files = [File.sample(seed=0)] + task.threshold = 3 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_tractskeleton.py b/pydra/tasks/fsl/v6/dti/tests/test_tractskeleton.py new file mode 100644 index 0000000..797391c --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_tractskeleton.py @@ -0,0 +1,23 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from 
pydra.tasks.fsl.v6.dti.tract_skeleton import TractSkeleton +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_tractskeleton_1(): + task = TractSkeleton() + task.in_file = File.sample(seed=0) + task.distance_map = File.sample(seed=3) + task.search_mask_file = File.sample(seed=4) + task.use_cingulum_mask = True + task.data_file = File.sample(seed=6) + task.alt_data_file = File.sample(seed=7) + task.alt_skeleton = File.sample(seed=8) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tests/test_vecreg.py b/pydra/tasks/fsl/v6/dti/tests/test_vecreg.py new file mode 100644 index 0000000..6d284f8 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_vecreg.py @@ -0,0 +1,38 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.vec_reg import VecReg +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_vecreg_1(): + task = VecReg() + task.in_file = Nifti1.sample(seed=0) + task.ref_vol = Nifti1.sample(seed=2) + task.affine_mat = TextMatrix.sample(seed=3) + task.warp_field = File.sample(seed=4) + task.rotation_mat = File.sample(seed=5) + task.rotation_warp = File.sample(seed=6) + task.mask = File.sample(seed=8) + task.ref_mask = File.sample(seed=9) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_vecreg_2(): + task = VecReg() + task.in_file = Nifti1.sample(seed=0) + task.out_file = "diffusion_vreg.nii" + task.ref_vol = Nifti1.sample(seed=2) + task.affine_mat = TextMatrix.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git 
a/pydra/tasks/fsl/v6/dti/tests/test_xfibres5.py b/pydra/tasks/fsl/v6/dti/tests/test_xfibres5.py new file mode 100644 index 0000000..8a63193 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tests/test_xfibres5.py @@ -0,0 +1,29 @@ +from fileformats.generic import Directory, File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.dti.x_fibres_5 import XFibres5 +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_xfibres5_1(): + task = XFibres5() + task.gradnonlin = File.sample(seed=0) + task.dwi = File.sample(seed=1) + task.mask = File.sample(seed=2) + task.bvecs = File.sample(seed=3) + task.bvals = File.sample(seed=4) + task.logdir = Directory.sample(seed=5) + task.n_fibres = 2 + task.n_jumps = 5000 + task.burn_in = 0 + task.burn_in_no_ard = 0 + task.sample_every = 1 + task.update_proposal_every = 40 + task.force_dir = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/dti/tract_skeleton.py b/pydra/tasks/fsl/v6/dti/tract_skeleton.py new file mode 100644 index 0000000..55d8956 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/tract_skeleton.py @@ -0,0 +1,147 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.base import Info +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + self_dict = {} + + if name == "project_data": + if (value is not attrs.NOTHING) and value: + _si = self_dict["inputs"] + if (_si.use_cingulum_mask is not attrs.NOTHING) and _si.use_cingulum_mask: + mask_file = Info.standard_image("LowerCingulum_1mm.nii.gz") + else: + mask_file = _si.search_mask_file + if _si.projected_data 
is attrs.NOTHING: + proj_file = _list_outputs()["projected_data"] + else: + proj_file = _si.projected_data + return argstr % ( + _si.threshold, + _si.distance_map, + mask_file, + _si.data_file, + proj_file, + ) + elif name == "skeleton_file": + if isinstance(value, bool): + return argstr.format(**{name: _list_outputs()["skeleton_file"]}) + else: + return argstr.format(**{name: value}) + + return argstr.format(**inputs) + + +def project_data_formatter(field, inputs): + return _format_arg( + "project_data", + field, + inputs, + argstr="-p {project_data:d:.3} {project_data:d} {project_data:d} {project_data:d} {project_data:d}", + ) + + +def skeleton_file_formatter(field, inputs): + return _format_arg("skeleton_file", field, inputs, argstr="-o {skeleton_file}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + self_dict = {} + + outputs = {} + _si = self_dict["inputs"] + if (_si.project_data is not attrs.NOTHING) and _si.project_data: + proj_data = _si.projected_data + outputs["projected_data"] = proj_data + if proj_data is attrs.NOTHING: + stem = _si.data_file + if _si.alt_data_file is not attrs.NOTHING: + stem = _si.alt_data_file + outputs["projected_data"] = fname_presuffix( + stem, suffix="_skeletonised", newpath=os.getcwd(), use_ext=True + ) + if (_si.skeleton_file is not attrs.NOTHING) and _si.skeleton_file: + outputs["skeleton_file"] = _si.skeleton_file + if isinstance(_si.skeleton_file, bool): + outputs["skeleton_file"] = fname_presuffix( + _si.in_file, suffix="_skeleton", newpath=os.getcwd(), use_ext=True + ) + return outputs + + +def projected_data_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("projected_data") + + +def skeleton_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) 
+ return outputs.get("skeleton_file") + + +@shell.define(xor=[["search_mask_file", "use_cingulum_mask"]]) +class TractSkeleton(shell.Task["TractSkeleton.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.dti.tract_skeleton import TractSkeleton + + """ + + executable = "tbss_skeleton" + in_file: File = shell.arg( + help="input image (typically mean FA volume)", argstr="-i {in_file}" + ) + project_data: bool = shell.arg( + help="project data onto skeleton", + requires=["threshold", "distance_map", "data_file"], + formatter="project_data_formatter", + ) + threshold: float = shell.arg(help="skeleton threshold value") + distance_map: File = shell.arg(help="distance map image") + search_mask_file: File | None = shell.arg( + help="mask in which to use alternate search rule" + ) + use_cingulum_mask: bool = shell.arg( + help="perform alternate search using built-in cingulum mask", default=True + ) + data_file: File = shell.arg(help="4D data to project onto skeleton (usually FA)") + alt_data_file: File = shell.arg( + help="4D non-FA data to project onto skeleton", argstr="-a {alt_data_file}" + ) + alt_skeleton: File = shell.arg( + help="alternate skeleton to use", argstr="-s {alt_skeleton}" + ) + projected_data: Path = shell.arg(help="input data projected onto skeleton") + skeleton_file: ty.Any = shell.arg( + help="write out skeleton image", formatter="skeleton_file_formatter" + ) + + class Outputs(shell.Outputs): + projected_data: File | None = shell.out( + help="input data projected onto skeleton", callable=projected_data_callable + ) + skeleton_file: File | None = shell.out( + help="tract skeleton image", callable=skeleton_file_callable + ) diff --git a/pydra/tasks/fsl/v6/dti/vec_reg.py b/pydra/tasks/fsl/v6/dti/vec_reg.py new file mode 100644 index 0000000..49ecaa3 --- /dev/null +++ b/pydra/tasks/fsl/v6/dti/vec_reg.py @@ -0,0 +1,159 @@ +import attrs +from fileformats.datascience 
import attrs
from fileformats.datascience import TextMatrix
from fileformats.generic import File
from fileformats.medimage import Nifti1
import logging
from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix
import os
from pathlib import Path  # was imported twice; deduplicated
from pydra.compose import shell
import typing as ty


logger = logging.getLogger(__name__)


def _gen_filename(name, inputs):
    """Return the default value for the ``out_file`` input.

    Delegates to :func:`_list_outputs`; any name other than ``"out_file"``
    yields ``None``.
    """
    if name == "out_file":
        return _list_outputs(
            in_file=inputs["in_file"],
            out_file=inputs["out_file"],
            output_type=inputs["output_type"],
        )[name]
    else:
        return None


def out_file_default(inputs):
    """Default callable wired to the ``out_file`` input."""
    return _gen_filename("out_file", inputs=inputs)


@shell.define
class VecReg(shell.Task["VecReg.Outputs"]):
    """Shell task wrapping FSL's ``vecreg`` (vector/tensor field registration).

    Examples
    -------

    >>> from fileformats.datascience import TextMatrix
    >>> from fileformats.generic import File
    >>> from fileformats.medimage import Nifti1
    >>> from pathlib import Path
    >>> from pydra.tasks.fsl.v6.dti.vec_reg import VecReg

    >>> task = VecReg()
    >>> task.in_file = Nifti1.mock("diffusion.nii")
    >>> task.out_file = "diffusion_vreg.nii"
    >>> task.ref_vol = Nifti1.mock("mni.nii")
    >>> task.affine_mat = TextMatrix.mock("trans.mat")
    >>> task.warp_field = File.mock()
    >>> task.rotation_mat = File.mock()
    >>> task.rotation_warp = File.mock()
    >>> task.mask = File.mock()
    >>> task.ref_mask = File.mock()
    >>> task.cmdline
    'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii'


    """

    executable = "vecreg"
    in_file: Nifti1 = shell.arg(
        help="filename for input vector or tensor field", argstr="-i {in_file}"
    )
    ref_vol: Nifti1 = shell.arg(
        help="filename for reference (target) volume", argstr="-r {ref_vol}"
    )
    affine_mat: TextMatrix = shell.arg(
        help="filename for affine transformation matrix", argstr="-t {affine_mat}"
    )
    warp_field: File = shell.arg(
        help="filename for 4D warp field for nonlinear registration",
        argstr="-w {warp_field}",
    )
    rotation_mat: File = shell.arg(
        help="filename for secondary affine matrix if set, this will be used for the rotation of the vector/tensor field",
        argstr="--rotmat={rotation_mat}",
    )
    rotation_warp: File = shell.arg(
        help="filename for secondary warp field if set, this will be used for the rotation of the vector/tensor field",
        argstr="--rotwarp={rotation_warp}",
    )
    interpolation: ty.Any = shell.arg(
        help="interpolation method : nearestneighbour, trilinear (default), sinc or spline",
        argstr="--interp={interpolation}",
    )
    mask: File = shell.arg(help="brain mask in input space", argstr="-m {mask}")
    ref_mask: File = shell.arg(
        help="brain mask in output space (useful for speed up of nonlinear reg)",
        argstr="--refmask={ref_mask}",
    )

    class Outputs(shell.Outputs):
        out_file: Path = shell.outarg(
            help="filename for output registered vector or tensor field",
            argstr="-o {out_file}",
            # FIX: the generated value was the literal string
            # '"diffusion_vreg.nii"' (embedded double quotes), which would put
            # quote characters into the filename and contradicts the doctest
            # above.
            path_template="diffusion_vreg.nii",
        )


def _gen_fname(
    basename,
    cwd=None,
    suffix=None,
    change_ext=True,
    ext=None,
    output_type=None,
    output_dir=None,
):
    """Generate a filename based on the given parameters.

    The filename will take the form: cwd/basename.
    If change_ext is True, it will use the extensions specified in
    inputs.output_type.

    Parameters
    ----------
    basename : str
        Filename to base the new filename on.
    cwd : str
        Path to prefix to the new filename. (default is ``output_dir``,
        falling back to the current working directory)
    suffix : str
        Suffix to add to the `basename`. (defaults is '' )
    change_ext : bool
        Flag to change the filename extension to the FSL output type.
        (default True)
    output_dir : str, optional
        Directory used when ``cwd`` is not given. (New, backward-compatible
        parameter: the original body read an unbound module name
        ``output_dir`` and raised NameError.)

    Returns
    -------
    fname : str
        New filename based on given parameters.

    Raises
    ------
    ValueError
        If ``basename`` is empty.
    """
    if basename == "":
        msg = "Unable to generate filename for command %s. " % "vecreg"
        msg += "basename is not set!"
        raise ValueError(msg)
    if cwd is None:
        # FIX: ``output_dir`` was an unbound name here; fall back to the
        # explicit parameter, then to the current working directory.
        cwd = output_dir if output_dir is not None else os.getcwd()
    if ext is None:
        # NOTE(review): ``Info`` is not imported in this module — presumably
        # the FSL ``Info`` helper from the converted package; confirm the
        # converter emits it, otherwise this line raises NameError whenever
        # ``ext`` is not supplied.
        ext = Info.output_type_to_ext(output_type)
    if change_ext:
        if suffix:
            suffix = f"{suffix}{ext}"
        else:
            suffix = ext
    if suffix is None:
        suffix = ""
    fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)
    return fname


def _list_outputs(in_file=None, out_file=None, output_type=None):
    """Resolve the ``out_file`` output, deriving it from ``in_file`` when unset."""
    outputs = {}
    outputs["out_file"] = out_file
    if (outputs["out_file"] is attrs.NOTHING) and (in_file is not attrs.NOTHING):
        pth, base_name = os.path.split(in_file)
        # Derived name: <in_file basename> + "_vreg", next to the input file.
        outputs["out_file"] = _gen_fname(
            base_name, cwd=os.path.abspath(pth), suffix="_vreg", output_type=output_type
        )
    outputs["out_file"] = os.path.abspath(outputs["out_file"])
    return outputs


IFLOGGER = logging.getLogger("nipype.interface")


# --- new file: pydra/tasks/fsl/v6/dti/x_fibres_5.py ---

import attrs
from fileformats.generic import Directory, File
import logging
from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix
import os
from pydra.compose import shell
import typing as ty


logger = logging.getLogger(__name__)
def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
    """Resolve all ``xfibres`` output paths under the run's log directory.

    Returns a dict with single-file outputs (mean diffusivity/S0, optional
    tau samples) and per-fibre lists (dyads, f/ph/th samples) for
    ``n_fibres`` fibres.
    """
    inputs = attrs.asdict(inputs)

    outputs = {}
    n_fibres = inputs["n_fibres"]
    # FIX: the generated code tested an unbound name ``out_dir`` here
    # (NameError); the directory is simply derived from ``logdir``.
    if inputs["logdir"] is not attrs.NOTHING:
        out_dir = os.path.abspath(inputs["logdir"])
    else:
        out_dir = os.path.abspath("logdir")

    def _fname(name):
        # FIX: the generated code forwarded inputs["inputs"],
        # inputs["output_dir"], inputs["stderr"] and inputs["stdout"] — keys
        # that do not exist in the attrs.asdict of the task (KeyError).
        # Forward this function's own parameters instead.
        return _gen_fname(
            name,
            cwd=out_dir,
            output_type=inputs["output_type"],
            inputs=inputs,
            output_dir=output_dir,
            stderr=stderr,
            stdout=stdout,
        )

    multi_out = ["dyads", "fsamples", "mean_fsamples", "phsamples", "thsamples"]
    single_out = ["mean_dsamples", "mean_S0samples"]

    for k in single_out:
        outputs[k] = _fname(k)

    # Tau samples are only produced under Rician noise modelling.
    if (inputs["rician"] is not attrs.NOTHING) and inputs["rician"]:
        outputs["mean_tausamples"] = _fname("mean_tausamples")

    for k in multi_out:
        outputs[k] = []

    for i in range(1, n_fibres + 1):
        outputs["fsamples"].append(_fname("f%dsamples" % i))
        outputs["mean_fsamples"].append(_fname("mean_f%dsamples" % i))

    for i in range(1, n_fibres + 1):
        outputs["dyads"].append(_fname("dyads%d" % i))
        outputs["phsamples"].append(_fname("ph%dsamples" % i))
        outputs["thsamples"].append(_fname("th%dsamples" % i))

    return outputs


def dyads_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``dyads``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("dyads")


def fsamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``fsamples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("fsamples")


def mean_dsamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``mean_dsamples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("mean_dsamples")


def mean_fsamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``mean_fsamples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("mean_fsamples")


def mean_S0samples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``mean_S0samples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("mean_S0samples")


def mean_tausamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``mean_tausamples`` (Rician noise only)."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("mean_tausamples")


def phsamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``phsamples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("phsamples")


def thsamples_callable(output_dir, inputs, stdout, stderr):
    """Output callable for ``thsamples``."""
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs.get("thsamples")


# Mutually exclusive flag groups mirror the xfibres command-line contract.
@shell.define(
    xor=[
        ["all_ard", "f0_ard", "f0_noard"],
        ["all_ard", "no_ard"],
        ["cnlinear", "no_spat", "non_linear"],
        ["f0_ard", "f0_noard"],
    ]
)
class XFibres5(shell.Task["XFibres5.Outputs"]):
    """Shell task wrapping FSL's ``xfibres`` (FDT crossing-fibre estimation).

    Examples
    -------

    >>> from fileformats.generic import Directory, File
    >>> from pydra.tasks.fsl.v6.dti.x_fibres_5 import XFibres5

    """

    executable = "xfibres"
    gradnonlin: File = shell.arg(
        help="gradient file corresponding to slice", argstr="--gradnonlin={gradnonlin}"
    )
    dwi: File = shell.arg(
        help="diffusion weighted image data file", argstr="--data={dwi}"
    )
    mask: File = shell.arg(
        help="brain binary mask file (i.e. from BET)", argstr="--mask={mask}"
    )
    bvecs: File = shell.arg(help="b vectors file", argstr="--bvecs={bvecs}")
    bvals: File = shell.arg(help="b values file", argstr="--bvals={bvals}")
    logdir: Directory = shell.arg(help="", argstr="--logdir={logdir}", default=".")
    n_fibres: ty.Any | None = shell.arg(
        help="Maximum number of fibres to fit in each voxel",
        argstr="--nfibres={n_fibres}",
        default=2,
    )
    model: ty.Any = shell.arg(
        help="use monoexponential (1, default, required for single-shell) or multiexponential (2, multi-shell) model",
        argstr="--model={model}",
    )
    fudge: int = shell.arg(help="ARD fudge factor", argstr="--fudge={fudge}")
    n_jumps: int = shell.arg(
        help="Num of jumps to be made by MCMC",
        argstr="--njumps={n_jumps}",
        default=5000,
    )
    burn_in: ty.Any = shell.arg(
        help="Total num of jumps at start of MCMC to be discarded",
        argstr="--burnin={burn_in}",
        default=0,
    )
    burn_in_no_ard: ty.Any = shell.arg(
        help="num of burnin jumps before the ard is imposed",
        argstr="--burnin_noard={burn_in_no_ard}",
        default=0,
    )
    sample_every: ty.Any = shell.arg(
        help="Num of jumps for each sample (MCMC)",
        argstr="--sampleevery={sample_every}",
        default=1,
    )
    update_proposal_every: ty.Any = shell.arg(
        help="Num of jumps for each update to the proposal density std (MCMC)",
        argstr="--updateproposalevery={update_proposal_every}",
        default=40,
    )
    seed: int = shell.arg(
        help="seed for pseudo random number generator", argstr="--seed={seed}"
    )
    no_ard: bool = shell.arg(help="Turn ARD off on all fibres", argstr="--noard")
    all_ard: bool = shell.arg(help="Turn ARD on on all fibres", argstr="--allard")
    no_spat: bool = shell.arg(
        help="Initialise with tensor, not spatially", argstr="--nospat"
    )
    non_linear: bool = shell.arg(
        help="Initialise with nonlinear fitting", argstr="--nonlinear"
    )
    cnlinear: bool = shell.arg(
        help="Initialise with constrained nonlinear fitting", argstr="--cnonlinear"
    )
    rician: bool = shell.arg(help="use Rician noise modeling", argstr="--rician")
    f0_noard: bool = shell.arg(
        help="Noise floor model: add to the model an unattenuated signal compartment f0",
        argstr="--f0",
    )
    f0_ard: bool = shell.arg(
        help="Noise floor model: add to the model an unattenuated signal compartment f0",
        argstr="--f0 --ardf0",
    )
    force_dir: bool = shell.arg(
        help="use the actual directory name given (do not add + to make a new directory)",
        argstr="--forcedir",
        default=True,
    )

    class Outputs(shell.Outputs):
        dyads: list[File] | None = shell.out(
            help="Mean of PDD distribution in vector form.", callable=dyads_callable
        )
        fsamples: list[File] | None = shell.out(
            help="Samples from the distribution on f anisotropy",
            callable=fsamples_callable,
        )
        mean_dsamples: File | None = shell.out(
            help="Mean of distribution on diffusivity d",
            callable=mean_dsamples_callable,
        )
        mean_fsamples: list[File] | None = shell.out(
            help="Mean of distribution on f anisotropy", callable=mean_fsamples_callable
        )
        mean_S0samples: File | None = shell.out(
            help="Mean of distribution on T2w baseline signal intensity S0",
            callable=mean_S0samples_callable,
        )
        mean_tausamples: File | None = shell.out(
            help="Mean of distribution on tau samples (only with rician noise)",
            callable=mean_tausamples_callable,
        )
        phsamples: list[File] | None = shell.out(
            help="phi samples, per fiber", callable=phsamples_callable
        )
        thsamples: list[File] | None = shell.out(
            help="theta samples, per fiber", callable=thsamples_callable
        )


def _gen_fname(
    basename,
    cwd=None,
    suffix=None,
    change_ext=True,
    ext=None,
    output_type=None,
    inputs=None,
    output_dir=None,
    stderr=None,
    stdout=None,
):
    """Generate a filename based on the given parameters.

    The filename will take the form: cwd/basename.
    If change_ext is True, it will use the extensions specified in
    inputs.output_type.

    Parameters
    ----------
    basename : str
        Filename to base the new filename on.
    cwd : str
        Path to prefix to the new filename. (default is output_dir)
    suffix : str
        Suffix to add to the `basename`. (defaults is '' )
    change_ext : bool
        Flag to change the filename extension to the FSL output type.
        (default True)

    Returns
    -------
    fname : str
        New filename based on given parameters.

    Raises
    ------
    ValueError
        If ``basename`` is empty.
    """
    if basename == "":
        msg = "Unable to generate filename for command %s. " % "xfibres"
        msg += "basename is not set!"
        raise ValueError(msg)
    if cwd is None:
        cwd = output_dir
    if ext is None:
        # NOTE(review): ``Info`` is not imported in this module — presumably
        # the FSL ``Info`` helper; confirm the converter emits it, otherwise
        # this raises NameError when ``ext`` is not supplied.
        ext = Info.output_type_to_ext(output_type)
    if change_ext:
        if suffix:
            suffix = f"{suffix}{ext}"
        else:
            suffix = ext
    if suffix is None:
        suffix = ""
    fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)
    return fname


IFLOGGER = logging.getLogger("nipype.interface")


# --- new file: pydra/tasks/fsl/v6/epi/__init__.py ---

from .apply_topup import ApplyTOPUP
from .eddy import Eddy
from .eddy_correct import EddyCorrect
from .eddy_quad import EddyQuad
from .epi_de_warp import EPIDeWarp
from .epi_reg import EpiReg
from .prepare_fieldmap import PrepareFieldmap
from .topup import TOPUP


# --- new file: pydra/tasks/fsl/v6/epi/apply_topup.py ---

import attrs
from fileformats.generic import File
from fileformats.medimage import Nifti1, NiftiGz
import logging
import os
from pathlib import Path  # was imported twice; deduplicated
from pydra.compose import shell
import typing as ty


logger = logging.getLogger(__name__)
def _format_arg(name, value, inputs, argstr):
    """Render one command-line argument for ``applytopup``.

    Returns an empty string for unset values; for ``in_topup_fieldcoef`` the
    ``_fieldcoef`` suffix is stripped because topup expects the basename
    shared by the ``_fieldcoef``/``_movpar`` file pair.
    """
    # Called for its side effect: _parse_inputs defaults ``in_index`` by
    # mutating the ``inputs`` mapping. Its return value is unused here.
    _parse_inputs(inputs) if inputs else {}
    if value is None:
        return ""

    if name == "in_topup_fieldcoef":
        return argstr.format(**{name: value.split("_fieldcoef")[0]})

    return argstr.format(**inputs)


def in_topup_fieldcoef_formatter(field, inputs):
    """Formatter hook for the ``in_topup_fieldcoef`` input."""
    return _format_arg(
        "in_topup_fieldcoef", field, inputs, argstr="--topup={in_topup_fieldcoef}"
    )


def _parse_inputs(inputs, output_dir=None):
    """Fill derived defaults: ``in_index`` becomes 1..len(in_files) when unset.

    NOTE: mutates ``inputs`` in place; returns an (currently empty) dict of
    parsed values for interface symmetry with the other converted modules.
    """
    if not output_dir:
        output_dir = os.getcwd()
    parsed_inputs = {}
    # (Removed: a dead ``skip`` local that was assigned ``[]`` and then
    # re-tested for ``None`` — unreachable and never read.)

    if inputs["in_index"] is attrs.NOTHING:
        inputs["in_index"] = list(range(1, len(inputs["in_files"]) + 1))

    return parsed_inputs


@shell.define
class ApplyTOPUP(shell.Task["ApplyTOPUP.Outputs"]):
    """Shell task wrapping FSL's ``applytopup`` distortion correction.

    Examples
    -------

    >>> from fileformats.generic import File
    >>> from fileformats.medimage import Nifti1, NiftiGz
    >>> from pathlib import Path
    >>> from pydra.tasks.fsl.v6.epi.apply_topup import ApplyTOPUP

    >>> task = ApplyTOPUP()
    >>> task.in_files = [Nifti1.mock("epi.nii"), Nifti1.mock("epi_rev.nii")]
    >>> task.encoding_file = File.mock()
    >>> task.in_topup_fieldcoef = NiftiGz.mock("topup_fieldcoef.nii.gz")
    >>> task.in_topup_movpar = File.mock()
    >>> task.cmdline
    'applytopup --datain=topup_encoding.txt --imain=epi.nii,epi_rev.nii --inindex=1,2 --topup=topup --out=epi_corrected.nii.gz'


    """

    executable = "applytopup"
    in_files: list[Nifti1] = shell.arg(
        help="name of file with images", argstr="--imain={in_files}", sep=","
    )
    encoding_file: File = shell.arg(
        help="name of text file with PE directions/times",
        argstr="--datain={encoding_file}",
    )
    in_index: list[int] = shell.arg(
        help="comma separated list of indices corresponding to --datain",
        argstr="--inindex={in_index}",
        sep=",",
    )
    # The field coefficient and movement-parameter files are a pair; each
    # requires the other.
    in_topup_fieldcoef: NiftiGz | None = shell.arg(
        help="topup file containing the field coefficients",
        requires=["in_topup_movpar"],
        formatter="in_topup_fieldcoef_formatter",
    )
    in_topup_movpar: File | None = shell.arg(
        help="topup movpar.txt file", requires=["in_topup_fieldcoef"]
    )
    method: ty.Any = shell.arg(
        help="use jacobian modulation (jac) or least-squares resampling (lsr)",
        argstr="--method={method}",
    )
    interp: ty.Any = shell.arg(help="interpolation method", argstr="--interp={interp}")
    datatype: ty.Any = shell.arg(help="force output data type", argstr="-d={datatype}")

    class Outputs(shell.Outputs):
        out_corrected: Path = shell.outarg(
            help="output (warped) image",
            argstr="--out={out_corrected}",
            path_template="{in_files}_corrected",
        )


# --- new file: pydra/tasks/fsl/v6/epi/eddy.py ---

import attrs
from fileformats.generic import File
from fileformats.medimage import Nifti1
from fileformats.text import TextFile
import json
import logging
from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix
import os
from pydra.compose import shell
import typing as ty


logger = logging.getLogger(__name__)


def _format_arg(name, value, inputs, argstr):
    """Render one command-line argument for ``eddy``.

    Special cases: ``in_topup_fieldcoef`` is stripped to the shared topup
    basename; ``field`` loses its extension; ``out_base`` is made absolute.
    """
    if value is None:
        return ""

    if name == "in_topup_fieldcoef":
        return argstr.format(**{name: value.split("_fieldcoef")[0]})
    if name == "field":
        return argstr.format(**{name: fname_presuffix(value, use_ext=False)})
    if name == "out_base":
        return argstr.format(**{name: os.path.abspath(value)})

    return argstr.format(**inputs)


def in_topup_fieldcoef_formatter(field, inputs):
    """Formatter hook for the ``in_topup_fieldcoef`` input."""
    return _format_arg(
        "in_topup_fieldcoef", field, inputs, argstr="--topup={in_topup_fieldcoef}"
    )


def field_formatter(field, inputs):
    """Formatter hook for the ``field`` input."""
    return _format_arg("field", field, inputs, argstr="--field={field}")


def out_base_formatter(field, inputs):
    """Formatter hook for the ``out_base`` input."""
    return _format_arg("out_base", field, inputs, argstr="--out={out_base}")
outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % inputs["out_base"]) + outputs["out_parameter"] = os.path.abspath( + "%s.eddy_parameters" % inputs["out_base"] + ) + + out_rotated_bvecs = os.path.abspath("%s.eddy_rotated_bvecs" % inputs["out_base"]) + out_movement_rms = os.path.abspath("%s.eddy_movement_rms" % inputs["out_base"]) + out_restricted_movement_rms = os.path.abspath( + "%s.eddy_restricted_movement_rms" % inputs["out_base"] + ) + out_shell_alignment_parameters = os.path.abspath( + "%s.eddy_post_eddy_shell_alignment_parameters" % inputs["out_base"] + ) + out_shell_pe_translation_parameters = os.path.abspath( + "%s.eddy_post_eddy_shell_PE_translation_parameters" % inputs["out_base"] + ) + out_outlier_map = os.path.abspath("%s.eddy_outlier_map" % inputs["out_base"]) + out_outlier_n_stdev_map = os.path.abspath( + "%s.eddy_outlier_n_stdev_map" % inputs["out_base"] + ) + out_outlier_n_sqr_stdev_map = os.path.abspath( + "%s.eddy_outlier_n_sqr_stdev_map" % inputs["out_base"] + ) + out_outlier_report = os.path.abspath("%s.eddy_outlier_report" % inputs["out_base"]) + if (inputs["repol"] is not attrs.NOTHING) and inputs["repol"]: + out_outlier_free = os.path.abspath( + "%s.eddy_outlier_free_data" % inputs["out_base"] + ) + if os.path.exists(out_outlier_free): + outputs["out_outlier_free"] = out_outlier_free + if (inputs["mporder"] is not attrs.NOTHING) and inputs["mporder"] > 0: + out_movement_over_time = os.path.abspath( + "%s.eddy_movement_over_time" % inputs["out_base"] + ) + if os.path.exists(out_movement_over_time): + outputs["out_movement_over_time"] = out_movement_over_time + if (inputs["cnr_maps"] is not attrs.NOTHING) and inputs["cnr_maps"]: + out_cnr_maps = os.path.abspath("%s.eddy_cnr_maps.nii.gz" % inputs["out_base"]) + if os.path.exists(out_cnr_maps): + outputs["out_cnr_maps"] = out_cnr_maps + if (inputs["residuals"] is not attrs.NOTHING) and inputs["residuals"]: + out_residuals = os.path.abspath("%s.eddy_residuals.nii.gz" % inputs["out_base"]) + 
if os.path.exists(out_residuals): + outputs["out_residuals"] = out_residuals + + if os.path.exists(out_rotated_bvecs): + outputs["out_rotated_bvecs"] = out_rotated_bvecs + if os.path.exists(out_movement_rms): + outputs["out_movement_rms"] = out_movement_rms + if os.path.exists(out_restricted_movement_rms): + outputs["out_restricted_movement_rms"] = out_restricted_movement_rms + if os.path.exists(out_shell_alignment_parameters): + outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters + if os.path.exists(out_shell_pe_translation_parameters): + outputs["out_shell_pe_translation_parameters"] = ( + out_shell_pe_translation_parameters + ) + if os.path.exists(out_outlier_map): + outputs["out_outlier_map"] = out_outlier_map + if os.path.exists(out_outlier_n_stdev_map): + outputs["out_outlier_n_stdev_map"] = out_outlier_n_stdev_map + if os.path.exists(out_outlier_n_sqr_stdev_map): + outputs["out_outlier_n_sqr_stdev_map"] = out_outlier_n_sqr_stdev_map + if os.path.exists(out_outlier_report): + outputs["out_outlier_report"] = out_outlier_report + + return outputs + + +def out_corrected_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_corrected") + + +def out_parameter_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_parameter") + + +def out_rotated_bvecs_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_rotated_bvecs") + + +def out_movement_rms_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_movement_rms") + + +def out_restricted_movement_rms_callable(output_dir, inputs, 
stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_restricted_movement_rms") + + +def out_shell_alignment_parameters_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_shell_alignment_parameters") + + +def out_shell_pe_translation_parameters_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_shell_pe_translation_parameters") + + +def out_outlier_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_outlier_map") + + +def out_outlier_n_stdev_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_outlier_n_stdev_map") + + +def out_outlier_n_sqr_stdev_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_outlier_n_sqr_stdev_map") + + +def out_outlier_report_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_outlier_report") + + +def out_outlier_free_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_outlier_free") + + +def out_movement_over_time_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_movement_over_time") + + +def 
out_cnr_maps_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_cnr_maps") + + +def out_residuals_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_residuals") + + +@shell.define(xor=[["json", "slice_order"]]) +class Eddy(shell.Task["Eddy.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from fileformats.text import TextFile + >>> from pydra.tasks.fsl.v6.epi.eddy import Eddy + + >>> task = Eddy() + >>> task.in_file = Nifti1.mock("epi.nii") + >>> task.in_mask = File.mock() + >>> task.in_index = TextFile.mock("epi_index.txt") + >>> task.in_acqp = File.mock() + >>> task.in_bvec = File.mock() + >>> task.in_bval = File.mock() + >>> task.session = File.mock() + >>> task.in_topup_fieldcoef = File.mock() + >>> task.in_topup_movpar = File.mock() + >>> task.field = File.mock() + >>> task.field_mat = File.mock() + >>> task.slice_order = TextFile.mock() + >>> task.json = File.mock() + >>> task.cmdline + 'eddy_openmp --flm=quadratic --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 --out=.../eddy_corrected --slm=none' + + + >>> task = Eddy() + >>> task.in_file = Nifti1.mock() + >>> task.in_mask = File.mock() + >>> task.in_index = TextFile.mock() + >>> task.in_acqp = File.mock() + >>> task.in_bvec = File.mock() + >>> task.in_bval = File.mock() + >>> task.session = File.mock() + >>> task.in_topup_fieldcoef = File.mock() + >>> task.in_topup_movpar = File.mock() + >>> task.field = File.mock() + >>> task.field_mat = File.mock() + >>> task.slice_order = TextFile.mock() + >>> task.json = File.mock() + >>> task.use_cuda = True + >>> 
task.cmdline + 'None' + + + >>> task = Eddy() + >>> task.in_file = Nifti1.mock() + >>> task.in_mask = File.mock() + >>> task.in_index = TextFile.mock() + >>> task.in_acqp = File.mock() + >>> task.in_bvec = File.mock() + >>> task.in_bval = File.mock() + >>> task.session = File.mock() + >>> task.in_topup_fieldcoef = File.mock() + >>> task.in_topup_movpar = File.mock() + >>> task.field = File.mock() + >>> task.field_mat = File.mock() + >>> task.mporder = 6 + >>> task.slice2vol_lambda = 1 + >>> task.slice_order = TextFile.mock("epi_slspec.txt") + >>> task.json = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "eddy_openmp" + in_file: Nifti1 = shell.arg( + help="File containing all the images to estimate distortions for", + argstr="--imain={in_file}", + ) + in_mask: File = shell.arg(help="Mask to indicate brain", argstr="--mask={in_mask}") + in_index: TextFile = shell.arg( + help="File containing indices for all volumes in --imain into --acqp and --topup", + argstr="--index={in_index}", + ) + in_acqp: File = shell.arg( + help="File containing acquisition parameters", argstr="--acqp={in_acqp}" + ) + in_bvec: File = shell.arg( + help="File containing the b-vectors for all volumes in --imain", + argstr="--bvecs={in_bvec}", + ) + in_bval: File = shell.arg( + help="File containing the b-values for all volumes in --imain", + argstr="--bvals={in_bval}", + ) + out_base: str = shell.arg( + help="Basename for output image", + formatter="out_base_formatter", + default="eddy_corrected", + ) + session: File = shell.arg( + help="File containing session indices for all volumes in --imain", + argstr="--session={session}", + ) + in_topup_fieldcoef: File | None = shell.arg( + help="Topup results file containing the field coefficients", + requires=["in_topup_movpar"], + formatter="in_topup_fieldcoef_formatter", + ) + in_topup_movpar: File | None = shell.arg( + help="Topup results file containing the movement parameters (movpar.txt)", + requires=["in_topup_fieldcoef"], + 
) + field: File = shell.arg( + help="Non-topup derived fieldmap scaled in Hz", formatter="field_formatter" + ) + field_mat: File = shell.arg( + help="Matrix specifying the relative positions of the fieldmap, --field, and the first volume of the input file, --imain", + argstr="--field_mat={field_mat}", + ) + flm: ty.Any = shell.arg( + help="First level EC model", argstr="--flm={flm}", default="quadratic" + ) + slm: ty.Any = shell.arg( + help="Second level EC model", argstr="--slm={slm}", default="none" + ) + fep: bool = shell.arg( + help="Fill empty planes in x- or y-directions", argstr="--fep" + ) + initrand: bool = shell.arg( + help="Resets rand for when selecting voxels", argstr="--initrand" + ) + interp: ty.Any = shell.arg( + help="Interpolation model for estimation step", + argstr="--interp={interp}", + default="spline", + ) + nvoxhp: int = shell.arg( + help="# of voxels used to estimate the hyperparameters", + argstr="--nvoxhp={nvoxhp}", + default=1000, + ) + fudge_factor: float = shell.arg( + help="Fudge factor for hyperparameter error variance", + argstr="--ff={fudge_factor}", + default=10.0, + ) + dont_sep_offs_move: bool = shell.arg( + help="Do NOT attempt to separate field offset from subject movement", + argstr="--dont_sep_offs_move", + ) + dont_peas: bool = shell.arg( + help="Do NOT perform a post-eddy alignment of shells", argstr="--dont_peas" + ) + fwhm: float = shell.arg( + help="FWHM for conditioning filter when estimating the parameters", + argstr="--fwhm={fwhm}", + ) + niter: int = shell.arg( + help="Number of iterations", argstr="--niter={niter}", default=5 + ) + method: ty.Any = shell.arg( + help="Final resampling method (jacobian/least squares)", + argstr="--resamp={method}", + default="jac", + ) + repol: bool = shell.arg(help="Detect and replace outlier slices", argstr="--repol") + outlier_nstd: int = shell.arg( + help="Number of std off to qualify as outlier", + argstr="--ol_nstd", + requires=["repol"], + ) + outlier_nvox: int = shell.arg( + 
help="Min # of voxels in a slice for inclusion in outlier detection", + argstr="--ol_nvox", + requires=["repol"], + ) + outlier_type: ty.Any = shell.arg( + help="Type of outliers, slicewise (sw), groupwise (gw) or both (both)", + argstr="--ol_type", + requires=["repol"], + ) + outlier_pos: bool = shell.arg( + help="Consider both positive and negative outliers if set", + argstr="--ol_pos", + requires=["repol"], + ) + outlier_sqr: bool = shell.arg( + help="Consider outliers among sums-of-squared differences if set", + argstr="--ol_sqr", + requires=["repol"], + ) + multiband_factor: int = shell.arg( + help="Multi-band factor", argstr="--mb={multiband_factor}" + ) + multiband_offset: ty.Any = shell.arg( + help="Multi-band offset (-1 if bottom slice removed, 1 if top slice removed", + argstr="--mb_offs={multiband_offset}", + requires=["multiband_factor"], + ) + mporder: int = shell.arg( + help="Order of slice-to-vol movement model", + argstr="--mporder={mporder}", + requires=["use_cuda"], + ) + slice2vol_niter: int = shell.arg( + help="Number of iterations for slice-to-vol", + argstr="--s2v_niter={slice2vol_niter}", + requires=["mporder"], + ) + slice2vol_lambda: int = shell.arg( + help="Regularisation weight for slice-to-vol movement (reasonable range 1-10)", + argstr="--s2v_lambda={slice2vol_lambda}", + requires=["mporder"], + ) + slice2vol_interp: ty.Any = shell.arg( + help="Slice-to-vol interpolation model for estimation step", + argstr="--s2v_interp={slice2vol_interp}", + requires=["mporder"], + ) + slice_order: TextFile | None = shell.arg( + help="Name of text file completely specifying slice/group acquisition", + argstr="--slspec={slice_order}", + requires=["mporder"], + ) + json: File | None = shell.arg( + help="Name of .json text file with information about slice timing", + argstr="--json={json}", + requires=["mporder"], + ) + estimate_move_by_susceptibility: bool = shell.arg( + help="Estimate how susceptibility field changes with subject movement", + 
argstr="--estimate_move_by_susceptibility", + ) + mbs_niter: int = shell.arg( + help="Number of iterations for MBS estimation", + argstr="--mbs_niter={mbs_niter}", + requires=["estimate_move_by_susceptibility"], + ) + mbs_lambda: int = shell.arg( + help="Weighting of regularisation for MBS estimation", + argstr="--mbs_lambda={mbs_lambda}", + requires=["estimate_move_by_susceptibility"], + ) + mbs_ksp: int = shell.arg( + help="Knot-spacing for MBS field estimation", + argstr="--mbs_ksp={mbs_ksp}mm", + requires=["estimate_move_by_susceptibility"], + ) + num_threads: int = shell.arg(help="Number of openmp threads to use", default=1) + is_shelled: bool = shell.arg( + help="Override internal check to ensure that date are acquired on a set of b-value shells", + argstr="--data_is_shelled", + ) + use_cuda: bool = shell.arg(help="Run eddy using cuda gpu") + cnr_maps: bool = shell.arg(help="Output CNR-Maps", argstr="--cnr_maps") + residuals: bool = shell.arg(help="Output Residuals", argstr="--residuals") + + class Outputs(shell.Outputs): + out_corrected: File | None = shell.out( + help="4D image file containing all the corrected volumes", + callable=out_corrected_callable, + ) + out_parameter: File | None = shell.out( + help="Text file with parameters defining the field and movement for each scan", + callable=out_parameter_callable, + ) + out_rotated_bvecs: File | None = shell.out( + help="File containing rotated b-values for all volumes", + callable=out_rotated_bvecs_callable, + ) + out_movement_rms: File | None = shell.out( + help="Summary of the 'total movement' in each volume", + callable=out_movement_rms_callable, + ) + out_restricted_movement_rms: File | None = shell.out( + help="Summary of the 'total movement' in each volume disregarding translation in the PE direction", + callable=out_restricted_movement_rms_callable, + ) + out_shell_alignment_parameters: File | None = shell.out( + help="Text file containing rigid body movement parameters between the different shells 
as estimated by a post-hoc mutual information based registration", + callable=out_shell_alignment_parameters_callable, + ) + out_shell_pe_translation_parameters: File | None = shell.out( + help="Text file containing translation along the PE-direction between the different shells as estimated by a post-hoc mutual information based registration", + callable=out_shell_pe_translation_parameters_callable, + ) + out_outlier_map: File | None = shell.out( + help='Matrix where rows represent volumes and columns represent slices. "0" indicates that scan-slice is not an outlier and "1" indicates that it is', + callable=out_outlier_map_callable, + ) + out_outlier_n_stdev_map: File | None = shell.out( + help="Matrix where rows represent volumes and columns represent slices. Values indicate number of standard deviations off the mean difference between observation and prediction is", + callable=out_outlier_n_stdev_map_callable, + ) + out_outlier_n_sqr_stdev_map: File | None = shell.out( + help="Matrix where rows represent volumes and columns represent slices. 
Values indicate number of standard deivations off the square root of the mean squared difference between observation and prediction is", + callable=out_outlier_n_sqr_stdev_map_callable, + ) + out_outlier_report: File | None = shell.out( + help="Text file with a plain language report on what outlier slices eddy has found", + callable=out_outlier_report_callable, + ) + out_outlier_free: File | None = shell.out( + help="4D image file not corrected for susceptibility or eddy-current distortions or subject movement but with outlier slices replaced", + callable=out_outlier_free_callable, + ) + out_movement_over_time: File | None = shell.out( + help="Text file containing translations (mm) and rotations (radians) for each excitation", + callable=out_movement_over_time_callable, + ) + out_cnr_maps: File | None = shell.out( + help="path/name of file with the cnr_maps", callable=out_cnr_maps_callable + ) + out_residuals: File | None = shell.out( + help="path/name of file with the residuals", callable=out_residuals_callable + ) diff --git a/pydra/tasks/fsl/v6/epi/eddy_correct.py b/pydra/tasks/fsl/v6/epi/eddy_correct.py new file mode 100644 index 0000000..64b79f6 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/eddy_correct.py @@ -0,0 +1,48 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def eddy_corrected_callable(output_dir, inputs, stdout, stderr): + return inputs.out_file + + +@shell.define +class EddyCorrect(shell.Task["EddyCorrect.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.epi.eddy_correct import EddyCorrect + + >>> task = EddyCorrect() + >>> task.in_file = Nifti1.mock("diffusion.nii") + >>> task.out_file = "diffusion_edc.nii" + >>> task.ref_num = 0 + 
>>> task.cmdline + 'eddy_correct diffusion.nii diffusion_edc.nii 0' + + + """ + + executable = "eddy_correct" + in_file: Nifti1 = shell.arg(help="4D input file", argstr="{in_file}", position=1) + out_file: Path = shell.arg(help="4D output file", argstr="{out_file}", position=2) + ref_num: int | None = shell.arg( + help="reference number", argstr="{ref_num}", position=3, default=0 + ) + + class Outputs(shell.Outputs): + eddy_corrected: File | None = shell.out( + help="path/name of 4D eddy corrected output file", + callable=eddy_corrected_callable, + ) diff --git a/pydra/tasks/fsl/v6/epi/eddy_quad.py b/pydra/tasks/fsl/v6/epi/eddy_quad.py new file mode 100644 index 0000000..6ab8675 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/eddy_quad.py @@ -0,0 +1,196 @@ +import attrs +from fileformats.generic import File +from fileformats.text import TextFile +import logging +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + from glob import glob + + outputs = {} + + if inputs["output_dir"] is attrs.NOTHING: + out_dir = os.path.abspath(os.path.basename(inputs["base_name"]) + ".qc") + else: + out_dir = os.path.abspath(inputs["output_dir"]) + + outputs["qc_json"] = os.path.join(out_dir, "qc.json") + outputs["qc_pdf"] = os.path.join(out_dir, "qc.pdf") + + outputs["avg_b_png"] = sorted(glob(os.path.join(out_dir, "avg_b*.png"))) + + if inputs["field"] is not attrs.NOTHING: + outputs["avg_b0_pe_png"] = sorted(glob(os.path.join(out_dir, "avg_b0_pe*.png"))) + + for fname in outputs["avg_b0_pe_png"]: + outputs["avg_b_png"].remove(fname) + + outputs["vdm_png"] = os.path.join(out_dir, "vdm.png") + + outputs["cnr_png"] = sorted(glob(os.path.join(out_dir, "cnr*.png"))) + + residuals = os.path.join(out_dir, "eddy_msr.txt") + if os.path.isfile(residuals): + outputs["residuals"] = residuals + + clean_volumes = os.path.join(out_dir, 
"vols_no_outliers.txt") + if os.path.isfile(clean_volumes): + outputs["clean_volumes"] = clean_volumes + + return outputs + + +def qc_json_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("qc_json") + + +def qc_pdf_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("qc_pdf") + + +def avg_b_png_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("avg_b_png") + + +def avg_b0_pe_png_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("avg_b0_pe_png") + + +def cnr_png_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("cnr_png") + + +def vdm_png_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("vdm_png") + + +def residuals_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("residuals") + + +def clean_volumes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("clean_volumes") + + +@shell.define +class EddyQuad(shell.Task["EddyQuad.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.text import TextFile + >>> from pydra.tasks.fsl.v6.epi.eddy_quad import EddyQuad + + >>> task = EddyQuad() + >>> 
task.idx_file = File.mock() + >>> task.param_file = TextFile.mock("epi_acqp.txt") + >>> task.mask_file = File.mock() + >>> task.bval_file = File.mock() + >>> task.bvec_file = File.mock() + >>> task.output_dir = "eddy_corrected.qc" + >>> task.field = File.mock() + >>> task.slice_spec = File.mock() + >>> task.cmdline + 'eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt --verbose' + + + """ + + executable = "eddy_quad" + base_name: str = shell.arg( + help="Basename (including path) for EDDY output files, i.e., corrected images and QC files", + argstr="{base_name}", + position=1, + default="eddy_corrected", + ) + idx_file: File = shell.arg( + help="File containing indices for all volumes into acquisition parameters", + argstr="--eddyIdx {idx_file}", + ) + param_file: TextFile = shell.arg( + help="File containing acquisition parameters", + argstr="--eddyParams {param_file}", + ) + mask_file: File = shell.arg(help="Binary mask file", argstr="--mask {mask_file}") + bval_file: File = shell.arg(help="b-values file", argstr="--bvals {bval_file}") + bvec_file: File = shell.arg( + help="b-vectors file - only used when .eddy_residuals file is present", + argstr="--bvecs {bvec_file}", + ) + output_dir: str = shell.arg( + help="Output directory - default = '.qc'", + argstr="--output-dir {output_dir}", + ) + field: File = shell.arg( + help="TOPUP estimated field (in Hz)", argstr="--field {field}" + ) + slice_spec: File = shell.arg( + help="Text file specifying slice/group acquisition", + argstr="--slspec {slice_spec}", + ) + verbose: bool = shell.arg(help="Display debug messages", argstr="--verbose") + + class Outputs(shell.Outputs): + qc_json: File | None = shell.out( + help="Single subject database containing quality metrics and data info.", + callable=qc_json_callable, + ) + qc_pdf: File | None = shell.out( + help="Single 
subject QC report.", callable=qc_pdf_callable + ) + avg_b_png: list[File] | None = shell.out( + help="Image showing mid-sagittal, -coronal and -axial slices of each averaged b-shell volume.", + callable=avg_b_png_callable, + ) + avg_b0_pe_png: list[File] | None = shell.out( + help="Image showing mid-sagittal, -coronal and -axial slices of each averaged pe-direction b0 volume. Generated when using the -f option.", + callable=avg_b0_pe_png_callable, + ) + cnr_png: list[File] | None = shell.out( + help="Image showing mid-sagittal, -coronal and -axial slices of each b-shell CNR volume. Generated when CNR maps are available.", + callable=cnr_png_callable, + ) + vdm_png: File | None = shell.out( + help="Image showing mid-sagittal, -coronal and -axial slices of the voxel displacement map. Generated when using the -f option.", + callable=vdm_png_callable, + ) + residuals: File | None = shell.out( + help="Text file containing the volume-wise mask-averaged squared residuals. Generated when residual maps are available.", + callable=residuals_callable, + ) + clean_volumes: File | None = shell.out( + help="Text file containing a list of clean volumes, based on the eddy squared residuals. 
To generate a version of the pre-processed dataset without outlier volumes, use: `fslselectvols -i -o eddy_corrected_data_clean --vols=vols_no_outliers.txt`", + callable=clean_volumes_callable, + ) diff --git a/pydra/tasks/fsl/v6/epi/epi_de_warp.py b/pydra/tasks/fsl/v6/epi/epi_de_warp.py new file mode 100644 index 0000000..6138cb5 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/epi_de_warp.py @@ -0,0 +1,265 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["exfdw"] is attrs.NOTHING: + outputs["exfdw"] = _gen_filename( + "exfdw", + epi_file=inputs["epi_file"], + exf_file=inputs["exf_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + outputs["exfdw"] = inputs["exfdw"] + if inputs["epi_file"] is not attrs.NOTHING: + if inputs["epidw"] is not attrs.NOTHING: + outputs["unwarped_file"] = inputs["epidw"] + else: + outputs["unwarped_file"] = _gen_filename( + "epidw", + epi_file=inputs["epi_file"], + exf_file=inputs["exf_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if inputs["vsm"] is attrs.NOTHING: + outputs["vsm_file"] = _gen_filename( + "vsm", + epi_file=inputs["epi_file"], + exf_file=inputs["exf_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + outputs["vsm_file"] = _gen_fname( + inputs["vsm"], + output_type=inputs["output_type"], 
+ inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if inputs["tmpdir"] is attrs.NOTHING: + outputs["exf_mask"] = _gen_fname( + cwd=_gen_filename( + "tmpdir", + epi_file=inputs["epi_file"], + exf_file=inputs["exf_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ), + basename="maskexf", + ) + else: + outputs["exf_mask"] = _gen_fname( + cwd=inputs["tmpdir"], + basename="maskexf", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + return outputs + + +def unwarped_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("unwarped_file") + + +def vsm_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("vsm_file") + + +def exf_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("exf_mask") + + +def _gen_filename(name, inputs): + if name == "exfdw": + if inputs["exf_file"] is not attrs.NOTHING: + return _gen_fname( + inputs["exf_file"], suffix="_exfdw", output_type=inputs["output_type"] + ) + else: + return _gen_fname("exfdw", output_type=inputs["output_type"]) + if name == "epidw": + if inputs["epi_file"] is not attrs.NOTHING: + return _gen_fname( + inputs["epi_file"], suffix="_epidw", output_type=inputs["output_type"] + ) + if name == "vsm": + return _gen_fname("vsm", output_type=inputs["output_type"]) + if name == "tmpdir": + return os.path.join(os.getcwd(), "temp") + return None + + +def exfdw_default(inputs): + return 
_gen_filename("exfdw", inputs=inputs) + + +def tmpdir_default(inputs): + return _gen_filename("tmpdir", inputs=inputs) + + +def vsm_default(inputs): + return _gen_filename("vsm", inputs=inputs) + + +@shell.define +class EPIDeWarp(shell.Task["EPIDeWarp.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pydra.tasks.fsl.v6.epi.epi_de_warp import EPIDeWarp + + >>> task = EPIDeWarp() + >>> task.mag_file = File.mock() + >>> task.dph_file = Nifti1.mock("phase.nii") + >>> task.exf_file = File.mock() + >>> task.epi_file = Nifti1.mock("functional.nii") + >>> task.cmdline + 'epidewarp.fsl --mag magnitude.nii --dph phase.nii --epi functional.nii --esp 0.58 --exfdw .../exfdw.nii.gz --nocleanup --sigma 2 --tediff 2.46 --tmpdir .../temp --vsm .../vsm.nii.gz' + + + """ + + executable = "epidewarp.fsl" + mag_file: File = shell.arg( + help="Magnitude file", argstr="--mag {mag_file}", position=1 + ) + dph_file: Nifti1 = shell.arg( + help="Phase file assumed to be scaled from 0 to 4095", argstr="--dph {dph_file}" + ) + exf_file: File = shell.arg( + help="example func volume (or use epi)", argstr="--exf {exf_file}" + ) + epi_file: Nifti1 = shell.arg(help="EPI volume to unwarp", argstr="--epi {epi_file}") + tediff: float = shell.arg( + help="difference in B0 field map TEs", argstr="--tediff {tediff}", default=2.46 + ) + esp: float = shell.arg(help="EPI echo spacing", argstr="--esp {esp}", default=0.58) + sigma: int = shell.arg( + help="2D spatial gaussing smoothing stdev (default = 2mm)", + argstr="--sigma {sigma}", + default=2, + ) + vsm: ty.Any = shell.arg(help="voxel shift map", argstr="--vsm {vsm}") + epidw: ty.Any = shell.arg(help="dewarped epi volume", argstr="--epidw {epidw}") + tmpdir: ty.Any = shell.arg(help="tmpdir", argstr="--tmpdir {tmpdir}") + nocleanup: bool = shell.arg(help="no cleanup", argstr="--nocleanup", default=True) + cleanup: bool = shell.arg(help="cleanup", 
argstr="--cleanup") + + class Outputs(shell.Outputs): + exfdw: ty.Any = shell.outarg( + help="dewarped example func volume", + argstr="--exfdw {exfdw}", + path_template="exfdw", + ) + unwarped_file: File | None = shell.out( + help="unwarped epi file", callable=unwarped_file_callable + ) + vsm_file: File | None = shell.out( + help="voxel shift map", callable=vsm_file_callable + ) + exf_mask: File | None = shell.out( + help="Mask from example functional volume", callable=exf_mask_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "epidewarp.fsl" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/epi/epi_reg.py b/pydra/tasks/fsl/v6/epi/epi_reg.py new file mode 100644 index 0000000..bd97db4 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/epi_reg.py @@ -0,0 +1,275 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["out_file"] = os.path.join(os.getcwd(), inputs["out_base"] + ".nii.gz") + if not ((inputs["no_fmapreg"] is not attrs.NOTHING) and inputs["no_fmapreg"]) and ( + inputs["fmap"] is not attrs.NOTHING + ): + outputs["out_1vol"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_1vol.nii.gz" + ) + outputs["fmap2str_mat"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmap2str.mat" + ) + outputs["fmap2epi_mat"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmaprads2epi.mat" + ) + outputs["fmap_epi"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmaprads2epi.nii.gz" + ) + outputs["fmap_str"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmaprads2str.nii.gz" + ) + outputs["fmapmag_str"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmap2str.nii.gz" + ) + outputs["shiftmap"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fieldmaprads2epi_shift.nii.gz" + ) + outputs["fullwarp"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_warp.nii.gz" + ) + outputs["epi2str_inv"] = 
os.path.join( + os.getcwd(), inputs["out_base"] + "_inv.mat" + ) + if inputs["wmseg"] is attrs.NOTHING: + outputs["wmedge"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fast_wmedge.nii.gz" + ) + outputs["wmseg"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fast_wmseg.nii.gz" + ) + outputs["seg"] = os.path.join( + os.getcwd(), inputs["out_base"] + "_fast_seg.nii.gz" + ) + outputs["epi2str_mat"] = os.path.join(os.getcwd(), inputs["out_base"] + ".mat") + return outputs + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_file") + + +def out_1vol_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_1vol") + + +def fmap2str_mat_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fmap2str_mat") + + +def fmap2epi_mat_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fmap2epi_mat") + + +def fmap_epi_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fmap_epi") + + +def fmap_str_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fmap_str") + + +def fmapmag_str_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fmapmag_str") + + +def epi2str_inv_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("epi2str_inv") + + +def epi2str_mat_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("epi2str_mat") + + +def shiftmap_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("shiftmap") + + +def fullwarp_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fullwarp") + + +def wmseg_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("wmseg") + + +def seg_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("seg") + + +def wmedge_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("wmedge") + + +@shell.define +class EpiReg(shell.Task["EpiReg.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pydra.tasks.fsl.v6.epi.epi_reg import EpiReg + + >>> task = EpiReg() + >>> task.epi = Nifti1.mock("epi.nii") + >>> task.t1_head = File.mock() + >>> task.t1_brain = Nifti1.mock("T1_brain.nii") + >>> task.fmap = Nifti1.mock("fieldmap_phase_fslprepared.nii") + >>> task.fmapmag = File.mock() + >>> task.fmapmagbrain = Nifti1.mock("fieldmap_mag_brain.nii") + >>> task.wmseg = File.mock() + >>> task.pedir = "y" + >>> task.weight_image = File.mock() + >>> task.cmdline + 'epi_reg --echospacing=0.000670 
--fmap=fieldmap_phase_fslprepared.nii --fmapmag=fieldmap_mag.nii --fmapmagbrain=fieldmap_mag_brain.nii --noclean --pedir=y --epi=epi.nii --t1=T1.nii --t1brain=T1_brain.nii --out=epi2struct' + + + """ + + executable = "epi_reg" + epi: Nifti1 = shell.arg(help="EPI image", argstr="--epi={epi}", position=-4) + t1_head: File = shell.arg( + help="wholehead T1 image", argstr="--t1={t1_head}", position=-3 + ) + t1_brain: Nifti1 = shell.arg( + help="brain extracted T1 image", argstr="--t1brain={t1_brain}", position=-2 + ) + out_base: ty.Any = shell.arg( + help="output base name", + argstr="--out={out_base}", + position=-1, + default="epi2struct", + ) + fmap: Nifti1 = shell.arg(help="fieldmap image (in rad/s)", argstr="--fmap={fmap}") + fmapmag: File = shell.arg( + help="fieldmap magnitude image - wholehead", argstr="--fmapmag={fmapmag}" + ) + fmapmagbrain: Nifti1 = shell.arg( + help="fieldmap magnitude image - brain extracted", + argstr="--fmapmagbrain={fmapmagbrain}", + ) + wmseg: File = shell.arg( + help="white matter segmentation of T1 image, has to be named like the t1brain and end on _wmseg", + argstr="--wmseg={wmseg}", + ) + echospacing: float = shell.arg( + help="Effective EPI echo spacing (sometimes called dwell time) - in seconds", + argstr="--echospacing={echospacing}", + ) + pedir: ty.Any = shell.arg( + help="phase encoding direction, dir = x/y/z/-x/-y/-z", argstr="--pedir={pedir}" + ) + weight_image: File = shell.arg( + help="weighting image (in T1 space)", argstr="--weight={weight_image}" + ) + no_fmapreg: bool = shell.arg( + help="do not perform registration of fmap to T1 (use if fmap already registered)", + argstr="--nofmapreg", + ) + no_clean: bool = shell.arg( + help="do not clean up intermediate files", argstr="--noclean", default=True + ) + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="unwarped and coregistered epi input", callable=out_file_callable + ) + out_1vol: File | None = shell.out( + help="unwarped and coregistered 
single volume", callable=out_1vol_callable + ) + fmap2str_mat: File | None = shell.out( + help="rigid fieldmap-to-structural transform", + callable=fmap2str_mat_callable, + ) + fmap2epi_mat: File | None = shell.out( + help="rigid fieldmap-to-epi transform", callable=fmap2epi_mat_callable + ) + fmap_epi: File | None = shell.out( + help="fieldmap in epi space", callable=fmap_epi_callable + ) + fmap_str: File | None = shell.out( + help="fieldmap in structural space", callable=fmap_str_callable + ) + fmapmag_str: File | None = shell.out( + help="fieldmap magnitude image in structural space", + callable=fmapmag_str_callable, + ) + epi2str_inv: File | None = shell.out( + help="rigid structural-to-epi transform", callable=epi2str_inv_callable + ) + epi2str_mat: File | None = shell.out( + help="rigid epi-to-structural transform", callable=epi2str_mat_callable + ) + shiftmap: File | None = shell.out( + help="shiftmap in epi space", callable=shiftmap_callable + ) + fullwarp: File | None = shell.out( + help="warpfield to unwarp epi and transform into structural space", + callable=fullwarp_callable, + ) + wmseg: File | None = shell.out( + help="white matter segmentation used in flirt bbr", callable=wmseg_callable + ) + seg: File | None = shell.out( + help="white matter, gray matter, csf segmentation", callable=seg_callable + ) + wmedge: File | None = shell.out( + help="white matter edges for visualization", callable=wmedge_callable + ) diff --git a/pydra/tasks/fsl/v6/epi/prepare_fieldmap.py b/pydra/tasks/fsl/v6/epi/prepare_fieldmap.py new file mode 100644 index 0000000..7a3e2a0 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/prepare_fieldmap.py @@ -0,0 +1,155 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = 
logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + + if skip is None: + skip = [] + + if inputs["out_fieldmap"] is attrs.NOTHING: + inputs["out_fieldmap"] = _gen_fname( + inputs["in_phase"], suffix="_fslprepared", output_type=inputs["output_type"] + ) + + if (inputs["nocheck"] is attrs.NOTHING) or not inputs["nocheck"]: + skip += ["nocheck"] + + return parsed_inputs + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + parsed_inputs = _parse_inputs(inputs, output_dir=output_dir) + + outputs = {} + outputs["out_fieldmap"] = inputs["out_fieldmap"] + return outputs + + +def out_fieldmap_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_fieldmap") + + +@shell.define +class PrepareFieldmap(shell.Task["PrepareFieldmap.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.epi.prepare_fieldmap import PrepareFieldmap + + >>> task = PrepareFieldmap() + >>> task.in_phase = Nifti1.mock("phase.nii") + >>> task.in_magnitude = File.mock() + >>> task.cmdline + 'fsl_prepare_fieldmap SIEMENS phase.nii magnitude.nii .../phase_fslprepared.nii.gz 2.460000' + + + """ + + executable = "fsl_prepare_fieldmap" + scanner: ty.Any = shell.arg( + help="must be SIEMENS", argstr="{scanner}", position=1, default="SIEMENS" + ) + in_phase: Nifti1 = shell.arg( + help="Phase difference map, in SIEMENS format range from 0-4096 or 0-8192)", + argstr="{in_phase}", + position=2, + ) + in_magnitude: File = shell.arg( + help="Magnitude difference map, brain extracted", + argstr="{in_magnitude}", + position=3, + ) + delta_TE: float | None = shell.arg( + help="echo time difference of the 
fieldmap sequence in ms. (usually 2.46ms in Siemens)", + argstr="{delta_TE}", + position=-2, + default=2.46, + ) + nocheck: bool = shell.arg( + help="do not perform sanity checks for image size/range/dimensions", + argstr="--nocheck", + position=-1, + default=False, + ) + out_fieldmap: Path = shell.arg( + help="output name for prepared fieldmap", argstr="{out_fieldmap}", position=4 + ) + + class Outputs(shell.Outputs): + out_fieldmap: File | None = shell.out( + help="output name for prepared fieldmap", callable=out_fieldmap_callable + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fsl_prepare_fieldmap" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/epi/tests/conftest.py b/pydra/tasks/fsl/v6/epi/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/epi/tests/test_applytopup.py b/pydra/tasks/fsl/v6/epi/tests/test_applytopup.py new file mode 100644 index 0000000..f502e1f --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_applytopup.py @@ -0,0 +1,31 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1, NiftiGz +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.apply_topup import ApplyTOPUP +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_applytopup_1(): + task = ApplyTOPUP() + task.in_files = 
[Nifti1.sample(seed=0)] + task.encoding_file = File.sample(seed=1) + task.in_topup_fieldcoef = NiftiGz.sample(seed=3) + task.in_topup_movpar = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_applytopup_2(): + task = ApplyTOPUP() + task.in_files = [Nifti1.sample(seed=0)] + task.in_topup_fieldcoef = NiftiGz.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_eddy.py b/pydra/tasks/fsl/v6/epi/tests/test_eddy.py new file mode 100644 index 0000000..8c20fdd --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_eddy.py @@ -0,0 +1,70 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.eddy import Eddy +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_eddy_1(): + task = Eddy() + task.in_file = Nifti1.sample(seed=0) + task.in_mask = File.sample(seed=1) + task.in_index = TextFile.sample(seed=2) + task.in_acqp = File.sample(seed=3) + task.in_bvec = File.sample(seed=4) + task.in_bval = File.sample(seed=5) + task.out_base = "eddy_corrected" + task.session = File.sample(seed=7) + task.in_topup_fieldcoef = File.sample(seed=8) + task.in_topup_movpar = File.sample(seed=9) + task.field = File.sample(seed=10) + task.field_mat = File.sample(seed=11) + task.flm = "quadratic" + task.slm = "none" + task.interp = "spline" + task.nvoxhp = 1000 + task.fudge_factor = 10.0 + task.niter = 5 + task.method = "jac" + task.slice_order = TextFile.sample(seed=36) + task.json = File.sample(seed=37) + task.num_threads = 1 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def 
test_eddy_2(): + task = Eddy() + task.in_file = Nifti1.sample(seed=0) + task.in_index = TextFile.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_eddy_3(): + task = Eddy() + task.use_cuda = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_eddy_4(): + task = Eddy() + task.mporder = 6 + task.slice2vol_lambda = 1 + task.slice_order = TextFile.sample(seed=36) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_eddycorrect.py b/pydra/tasks/fsl/v6/epi/tests/test_eddycorrect.py new file mode 100644 index 0000000..c89ea76 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_eddycorrect.py @@ -0,0 +1,29 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.eddy_correct import EddyCorrect +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_eddycorrect_1(): + task = EddyCorrect() + task.in_file = Nifti1.sample(seed=0) + task.ref_num = 0 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_eddycorrect_2(): + task = EddyCorrect() + task.in_file = Nifti1.sample(seed=0) + task.out_file = "diffusion_edc.nii" + task.ref_num = 0 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_eddyquad.py b/pydra/tasks/fsl/v6/epi/tests/test_eddyquad.py new file mode 100644 index 0000000..45e84a7 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_eddyquad.py @@ -0,0 +1,35 @@ +from fileformats.generic import File +from fileformats.text import TextFile +import logging 
+from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.eddy_quad import EddyQuad +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_eddyquad_1(): + task = EddyQuad() + task.base_name = "eddy_corrected" + task.idx_file = File.sample(seed=1) + task.param_file = TextFile.sample(seed=2) + task.mask_file = File.sample(seed=3) + task.bval_file = File.sample(seed=4) + task.bvec_file = File.sample(seed=5) + task.field = File.sample(seed=7) + task.slice_spec = File.sample(seed=8) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_eddyquad_2(): + task = EddyQuad() + task.param_file = TextFile.sample(seed=2) + task.output_dir = "eddy_corrected.qc" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_epidewarp.py b/pydra/tasks/fsl/v6/epi/tests/test_epidewarp.py new file mode 100644 index 0000000..4a76cd6 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_epidewarp.py @@ -0,0 +1,35 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.epi_de_warp import EPIDeWarp +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_epidewarp_1(): + task = EPIDeWarp() + task.mag_file = File.sample(seed=0) + task.dph_file = Nifti1.sample(seed=1) + task.exf_file = File.sample(seed=2) + task.epi_file = Nifti1.sample(seed=3) + task.tediff = 2.46 + task.esp = 0.58 + task.sigma = 2 + task.nocleanup = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_epidewarp_2(): + task = EPIDeWarp() + task.dph_file = Nifti1.sample(seed=1) + task.epi_file = Nifti1.sample(seed=3) + 
print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_epireg.py b/pydra/tasks/fsl/v6/epi/tests/test_epireg.py new file mode 100644 index 0000000..1220895 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_epireg.py @@ -0,0 +1,40 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.epi_reg import EpiReg +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_epireg_1(): + task = EpiReg() + task.epi = Nifti1.sample(seed=0) + task.t1_head = File.sample(seed=1) + task.t1_brain = Nifti1.sample(seed=2) + task.out_base = "epi2struct" + task.fmap = Nifti1.sample(seed=4) + task.fmapmag = File.sample(seed=5) + task.fmapmagbrain = Nifti1.sample(seed=6) + task.wmseg = File.sample(seed=7) + task.weight_image = File.sample(seed=10) + task.no_clean = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_epireg_2(): + task = EpiReg() + task.epi = Nifti1.sample(seed=0) + task.t1_brain = Nifti1.sample(seed=2) + task.fmap = Nifti1.sample(seed=4) + task.fmapmagbrain = Nifti1.sample(seed=6) + task.pedir = "y" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_preparefieldmap.py b/pydra/tasks/fsl/v6/epi/tests/test_preparefieldmap.py new file mode 100644 index 0000000..452ae9c --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_preparefieldmap.py @@ -0,0 +1,31 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.prepare_fieldmap import PrepareFieldmap +import pytest + + +logger = 
logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_preparefieldmap_1(): + task = PrepareFieldmap() + task.scanner = "SIEMENS" + task.in_phase = Nifti1.sample(seed=1) + task.in_magnitude = File.sample(seed=2) + task.delta_TE = 2.46 + task.nocheck = False + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_preparefieldmap_2(): + task = PrepareFieldmap() + task.in_phase = Nifti1.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/tests/test_topup.py b/pydra/tasks/fsl/v6/epi/tests/test_topup.py new file mode 100644 index 0000000..bbfeb5e --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/tests/test_topup.py @@ -0,0 +1,32 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.epi.topup import TOPUP +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_topup_1(): + task = TOPUP() + task.in_file = Nifti1.sample(seed=0) + task.encoding_file = File.sample(seed=1) + task.out_warp_prefix = "warpfield" + task.out_mat_prefix = "xfm" + task.out_jac_prefix = "jac" + task.config = "b02b0.cnf" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_topup_2(): + task = TOPUP() + task.in_file = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/epi/topup.py b/pydra/tasks/fsl/v6/epi/topup.py new file mode 100644 index 0000000..e27ee83 --- /dev/null +++ b/pydra/tasks/fsl/v6/epi/topup.py @@ -0,0 +1,398 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import nibabel as nb 
+from pydra.tasks.fsl.v6.base import Info
+from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import (
+    fname_presuffix,
+    split_filename,
+)
+import numpy as np
+import os
+from pathlib import Path
+from pathlib import Path
+from pydra.compose import shell
+from pydra.utils.typing import MultiInputObj
+import typing as ty
+
+
+logger = logging.getLogger(__name__)
+
+
+def _format_arg(name, value, inputs, argstr):
+    if value is None:
+        return ""
+
+    if name == "encoding_direction":
+        return argstr.format(
+            **{
+                name: _generate_encfile(
+                    encoding_direction=inputs["encoding_direction"],
+                    in_file=inputs["in_file"],
+                    readout_times=inputs["readout_times"],
+                )
+            }
+        )
+    if name == "out_base":
+        path, name, ext = split_filename(value)
+        if path != "":
+            if not os.path.exists(path):
+                raise ValueError("out_base path must exist if provided")
+
+    return argstr.format(**inputs)
+
+
+def encoding_direction_formatter(field, inputs):
+    return _format_arg(
+        "encoding_direction", field, inputs, argstr="--datain={encoding_direction}"
+    )
+
+
+def out_base_formatter(field, inputs):
+    return _format_arg("out_base", field, inputs, argstr="--out={out_base}")
+
+
+def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+    inputs = attrs.asdict(inputs)
+
+    outputs = {}
+    # FIX(review): the original `del outputs["out_base"]` raised KeyError on
+    # the freshly-created empty dict; out_base is an input and is simply omitted.
+    base_path = None
+    if inputs["out_base"] is not attrs.NOTHING:
+        base_path, base, _ = split_filename(inputs["out_base"])
+        if base_path == "":
+            base_path = None
+    else:
+        base = split_filename(inputs["in_file"])[1] + "_base"
+    outputs["out_fieldcoef"] = _gen_fname(
+        base,
+        suffix="_fieldcoef",
+        cwd=base_path,
+        output_type=inputs["output_type"],
+        inputs=inputs,
+        output_dir=output_dir,
+        stderr=stderr,
+        stdout=stdout,
+    )
+    outputs["out_movpar"] = _gen_fname(
+        base,
+        suffix="_movpar",
+        ext=".txt",
+        cwd=base_path,
+        output_type=inputs["output_type"],
+        inputs=inputs,
+        output_dir=output_dir,
+        stderr=stderr,
+        stdout=stdout,
+    )
+
+    n_vols = nb.load(inputs["in_file"]).shape[-1]
+    ext = Info.output_type_to_ext(inputs["output_type"])
+    fmt = os.path.abspath("{prefix}_{i:02d}{ext}").format
+    outputs["out_warps"] = [
+        fmt(prefix=inputs["out_warp_prefix"], i=i, ext=ext)
+        for i in range(1, n_vols + 1)
+    ]
+    outputs["out_jacs"] = [
+        fmt(prefix=inputs["out_jac_prefix"], i=i, ext=ext) for i in range(1, n_vols + 1)
+    ]
+    outputs["out_mats"] = [
+        fmt(prefix=inputs["out_mat_prefix"], i=i, ext=".mat")
+        for i in range(1, n_vols + 1)
+    ]
+
+    if inputs["encoding_direction"] is not attrs.NOTHING:
+        outputs["out_enc_file"] = _get_encfilename(
+            in_file=inputs["in_file"],
+            inputs=inputs,
+            output_dir=output_dir,
+            stderr=stderr,
+            stdout=stdout,
+        )
+    return outputs
+
+
+def out_fieldcoef_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_fieldcoef")
+
+
+def out_movpar_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_movpar")
+
+
+def out_enc_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_enc_file")
+
+
+def out_warps_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_warps")
+
+
+def out_jacs_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_jacs")
+
+
+def out_mats_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, 
stderr=stderr + ) + return outputs.get("out_mats") + + +@shell.define( + xor=[["encoding_direction", "encoding_file"], ["encoding_file", "readout_times"]] +) +class TOPUP(shell.Task["TOPUP.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.epi.topup import TOPUP + >>> from pydra.utils.typing import MultiInputObj + + >>> task = TOPUP() + >>> task.in_file = Nifti1.mock("b0_b0rev.nii") + >>> task.encoding_file = File.mock() + >>> task.cmdline + 'topup --config=b02b0.cnf --datain=topup_encoding.txt --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log --rbmout=xfm --dfout=warpfield' + + + """ + + executable = "topup" + in_file: Nifti1 = shell.arg( + help="name of 4D file with images", argstr="--imain={in_file}" + ) + encoding_file: File | None = shell.arg( + help="name of text file with PE directions/times", + argstr="--datain={encoding_file}", + ) + encoding_direction: list[ty.Any] = shell.arg( + help="encoding direction for automatic generation of encoding_file", + requires=["readout_times"], + formatter="encoding_direction_formatter", + ) + readout_times: MultiInputObj = shell.arg( + help="readout times (dwell times by # phase-encode steps minus 1)", + requires=["encoding_direction"], + ) + out_base: Path = shell.arg( + help="base-name of output files (spline coefficients (Hz) and movement parameters)", + formatter="out_base_formatter", + ) + out_warp_prefix: str = shell.arg( + help="prefix for the warpfield images (in mm)", + argstr="--dfout={out_warp_prefix}", + default="warpfield", + ) + out_mat_prefix: str = shell.arg( + help="prefix for the realignment matrices", + argstr="--rbmout={out_mat_prefix}", + default="xfm", + ) + out_jac_prefix: str = shell.arg( + help="prefix for the warpfield images", + argstr="--jacout={out_jac_prefix}", + 
default="jac", + ) + warp_res: float = shell.arg( + help="(approximate) resolution (in mm) of warp basis for the different sub-sampling levels", + argstr="--warpres={warp_res}", + ) + subsamp: int = shell.arg(help="sub-sampling scheme", argstr="--subsamp={subsamp}") + fwhm: float = shell.arg( + help="FWHM (in mm) of gaussian smoothing kernel", argstr="--fwhm={fwhm}" + ) + config: ty.Any = shell.arg( + help="Name of config file specifying command line arguments", + argstr="--config={config}", + default="b02b0.cnf", + ) + max_iter: int = shell.arg( + help="max # of non-linear iterations", argstr="--miter={max_iter}" + ) + reg_lambda: float = shell.arg( + help="Weight of regularisation, default depending on --ssqlambda and --regmod switches.", + argstr="--lambda={reg_lambda:0.}", + ) + ssqlambda: ty.Any = shell.arg( + help="Weight lambda by the current value of the ssd. If used (=1), the effective weight of regularisation term becomes higher for the initial iterations, therefore initial steps are a little smoother than they would without weighting. This reduces the risk of finding a local minimum.", + argstr="--ssqlambda={ssqlambda}", + ) + regmod: ty.Any = shell.arg( + help="Regularisation term implementation. Defaults to bending_energy. Note that the two functions have vastly different scales. The membrane energy is based on the first derivatives and the bending energy on the second derivatives. 
The second derivatives will typically be much smaller than the first derivatives, so input lambda will have to be larger for bending_energy to yield approximately the same level of regularisation.", + argstr="--regmod={regmod}", + ) + estmov: ty.Any = shell.arg( + help="estimate movements if set", argstr="--estmov={estmov}" + ) + minmet: ty.Any = shell.arg( + help="Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient", + argstr="--minmet={minmet}", + ) + splineorder: int = shell.arg( + help="order of spline, 2->Qadratic spline, 3->Cubic spline", + argstr="--splineorder={splineorder}", + ) + numprec: ty.Any = shell.arg( + help="Precision for representing Hessian, double or float.", + argstr="--numprec={numprec}", + ) + interp: ty.Any = shell.arg( + help="Image interpolation model, linear or spline.", argstr="--interp={interp}" + ) + scale: ty.Any = shell.arg( + help="If set (=1), the images are individually scaled to a common mean", + argstr="--scale={scale}", + ) + regrid: ty.Any = shell.arg( + help="If set (=1), the calculations are done in a different grid", + argstr="--regrid={regrid}", + ) + + class Outputs(shell.Outputs): + out_field: Path = shell.outarg( + help="name of image file with field (Hz)", + argstr="--fout={out_field}", + path_template="{in_file}_field", + ) + out_corrected: Path = shell.outarg( + help="name of 4D image file with unwarped images", + argstr="--iout={out_corrected}", + path_template="{in_file}_corrected", + ) + out_logfile: Path = shell.outarg( + help="name of log-file", + argstr="--logout={out_logfile}", + path_template="{in_file}_topup.log", + ) + out_fieldcoef: File | None = shell.out( + help="file containing the field coefficients", + callable=out_fieldcoef_callable, + ) + out_movpar: File | None = shell.out( + help="movpar.txt output file", callable=out_movpar_callable + ) + out_enc_file: File | None = shell.out( + help="encoding directions file output for applytopup", + callable=out_enc_file_callable, + ) + 
out_warps: list[File] | None = shell.out( + help="warpfield images", callable=out_warps_callable + ) + out_jacs: list[File] | None = shell.out( + help="Jacobian images", callable=out_jacs_callable + ) + out_mats: list[File] | None = shell.out( + help="realignment matrices", callable=out_mats_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "topup" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _generate_encfile(encoding_direction=None, in_file=None, readout_times=None): + """Generate a topup compatible encoding file based on given directions""" + out_file = _get_encfilename(in_file=in_file) + durations = readout_times + if len(encoding_direction) != len(durations): + if len(readout_times) != 1: + raise ValueError( + "Readout time must be a float or match the" + "length of encoding directions" + ) + durations = durations * len(encoding_direction) + + lines = [] + for idx, encdir in enumerate(encoding_direction): + direction = 1.0 + if encdir.endswith("-"): + direction = -1.0 + line = [float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"]] + [ + durations[idx] + ] + lines.append(line) + np.savetxt(out_file, np.array(lines), fmt="%d %d %d %.8f") + return out_file + + +def _get_encfilename( + in_file=None, inputs=None, output_dir=None, stderr=None, stdout=None +): + out_file = os.path.join(output_dir, ("%s_encfile.txt" % split_filename(in_file)[1])) + return out_file + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/fix/__init__.py b/pydra/tasks/fsl/v6/fix/__init__.py new file mode 100644 index 0000000..30375e1 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/__init__.py @@ -0,0 +1,6 @@ +from .accuracy_tester import AccuracyTester +from .classifier import Classifier +from .cleaner import Cleaner +from .feature_extractor import FeatureExtractor +from .training import Training +from .training_set_creator import TrainingSetCreator diff --git a/pydra/tasks/fsl/v6/fix/accuracy_tester.py b/pydra/tasks/fsl/v6/fix/accuracy_tester.py new file mode 100644 index 0000000..89dd55b --- 
/dev/null +++ b/pydra/tasks/fsl/v6/fix/accuracy_tester.py @@ -0,0 +1,59 @@ +import attrs +from fileformats.generic import Directory, File +import logging +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["output_directory"] is not attrs.NOTHING: + outputs["output_directory"] = Directory( + exists=False, value=inputs["output_directory"] + ) + else: + outputs["output_directory"] = Directory(exists=False, value="accuracy_test") + return outputs + + +def output_directory_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("output_directory") + + +@shell.define +class AccuracyTester(shell.Task["AccuracyTester.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from pydra.tasks.fsl.v6.fix.accuracy_tester import AccuracyTester + + """ + + executable = "fix -C" + mel_icas: list[Directory] = shell.arg( + help="Melodic output directories", argstr="{mel_icas}", position=3 + ) + trained_wts_file: File = shell.arg( + help="trained-weights file", argstr="{trained_wts_file}", position=1 + ) + output_directory: ty.Any = shell.arg( + help="Path to folder in which to store the results of the accuracy test.", + argstr="{output_directory}", + position=2, + ) + + class Outputs(shell.Outputs): + output_directory: Directory | None = shell.out( + help="Path to folder in which to store the results of the accuracy test.", + callable=output_directory_callable, + ) diff --git a/pydra/tasks/fsl/v6/fix/classifier.py b/pydra/tasks/fsl/v6/fix/classifier.py new file mode 100644 index 0000000..99fb85e --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/classifier.py @@ -0,0 +1,84 @@ +import attrs +from fileformats.generic import Directory, File +import logging 
+import os
+from pathlib import Path
+from pathlib import Path
+from pydra.compose import shell
+
+
+logger = logging.getLogger(__name__)
+
+
+def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+    inputs = attrs.asdict(inputs)
+
+    outputs = {}
+    # FIX(review): pass the function parameters through; the original looked up
+    # inputs["inputs"]/["output_dir"]/["stderr"]/["stdout"], keys that
+    # attrs.asdict() of the task inputs does not contain (KeyError).
+    outputs["artifacts_list_file"] = _gen_artifacts_list_file(
+        inputs["mel_ica"],
+        inputs["thresh"],
+        trained_wts_file=inputs["trained_wts_file"],
+        inputs=inputs,
+        output_dir=output_dir,
+        stderr=stderr,
+        stdout=stdout,
+    )
+
+    return outputs
+
+
+def artifacts_list_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("artifacts_list_file")
+
+
+@shell.define
+class Classifier(shell.Task["Classifier.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.generic import Directory, File
+    >>> from pathlib import Path
+    >>> from pydra.tasks.fsl.v6.fix.classifier import Classifier
+
+    """
+
+    executable = "fix -c"
+    mel_ica: Directory = shell.arg(
+        help="Melodic output directory or directories", argstr="{mel_ica}", position=1
+    )
+    trained_wts_file: File = shell.arg(
+        help="trained-weights file", argstr="{trained_wts_file}", position=2
+    )
+    thresh: int = shell.arg(
+        help="Threshold for cleanup.", argstr="{thresh}", position=-1
+    )
+    artifacts_list_file: Path = shell.arg(
+        help="Text file listing which ICs are artifacts; can be the output from classification or can be created manually"
+    )
+
+    class Outputs(shell.Outputs):
+        artifacts_list_file: File | None = shell.out(
+            help="Text file listing which ICs are artifacts; can be the output from classification or can be created manually",
+            callable=artifacts_list_file_callable,
+        )
+
+
+def _gen_artifacts_list_file(
+    mel_ica,
+    thresh,
+    trained_wts_file=None,
+    inputs=None,
+    output_dir=None,
+    stderr=None,
+    stdout=None,
+):
+    _, trained_wts_file = os.path.split(trained_wts_file)
+    
trained_wts_filestem = trained_wts_file.split(".")[0]
+    filestem = "fix4melview_" + trained_wts_filestem + "_thr"
+
+    fname = os.path.join(mel_ica, filestem + str(thresh) + ".txt")
+    return fname
diff --git a/pydra/tasks/fsl/v6/fix/cleaner.py b/pydra/tasks/fsl/v6/fix/cleaner.py
new file mode 100644
index 0000000..af8e64f
--- /dev/null
+++ b/pydra/tasks/fsl/v6/fix/cleaner.py
@@ -0,0 +1,97 @@
+import attrs
+from fileformats.generic import File
+import logging
+import os
+from pydra.compose import shell
+
+
+logger = logging.getLogger(__name__)
+
+
+def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+    inputs = attrs.asdict(inputs)
+
+    outputs = {}
+    # FIX(review): pass the function parameters through; the original looked up
+    # inputs["inputs"]/["output_dir"]/["stderr"]/["stdout"], keys that
+    # attrs.asdict() of the task inputs does not contain (KeyError).
+    outputs["cleaned_functional_file"] = _get_cleaned_functional_filename(
+        inputs["artifacts_list_file"],
+        inputs=inputs,
+        output_dir=output_dir,
+        stderr=stderr,
+        stdout=stdout,
+    )
+    return outputs
+
+
+def cleaned_functional_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("cleaned_functional_file")
+
+
+@shell.define
+class Cleaner(shell.Task["Cleaner.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.generic import File
+    >>> from pydra.tasks.fsl.v6.fix.cleaner import Cleaner
+
+    """
+
+    executable = "fix -a"
+    artifacts_list_file: File = shell.arg(
+        help="Text file listing which ICs are artifacts; can be the output from classification or can be created manually",
+        argstr="{artifacts_list_file}",
+        position=1,
+    )
+    cleanup_motion: bool = shell.arg(
+        help="cleanup motion confounds, looks for design.fsf for highpass filter cut-off",
+        argstr="-m",
+        position=2,
+    )
+    highpass: float = shell.arg(
+        help="cleanup motion confounds",
+        argstr="-m -h {highpass}",
+        position=3,
+        default=100,
+    )
+    aggressive: bool = shell.arg(
+        help="Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.",
+        argstr="-A",
+        position=4,
+    )
+    confound_file: File = shell.arg(
+        help="Include additional confound file.",
+        argstr="-x {confound_file}",
+        position=5,
+    )
+    confound_file_1: File = shell.arg(
+        help="Include additional confound file.",
+        argstr="-x {confound_file_1}",
+        position=6,
+    )
+    confound_file_2: File = shell.arg(
+        help="Include additional confound file.",
+        argstr="-x {confound_file_2}",
+        position=7,
+    )
+
+    class Outputs(shell.Outputs):
+        cleaned_functional_file: File | None = shell.out(
+            help="Cleaned session data", callable=cleaned_functional_file_callable
+        )
+
+
+def _get_cleaned_functional_filename(
+    artifacts_list_filename, inputs=None, output_dir=None, stderr=None, stdout=None
+):
+    """extract the proper filename from the first line of the artifacts file"""
+    with open(artifacts_list_filename) as artifacts_list_file:
+        functional_filename, extension = artifacts_list_file.readline().split(".")
+    artifacts_list_file_path, artifacts_list_filename = os.path.split(
+        artifacts_list_filename
+    )
+
+    return os.path.join(artifacts_list_file_path, functional_filename + "_clean.nii.gz")
diff --git a/pydra/tasks/fsl/v6/fix/feature_extractor.py b/pydra/tasks/fsl/v6/fix/feature_extractor.py
new file mode 100644
index 0000000..50bb77f
--- /dev/null
+++ b/pydra/tasks/fsl/v6/fix/feature_extractor.py
@@ -0,0 +1,45 @@
+import attrs
+from fileformats.generic import Directory
+import logging
+from pydra.compose import shell
+import typing as ty
+
+
+logger = logging.getLogger(__name__)
+
+
+def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+    inputs = attrs.asdict(inputs)
+
+    outputs = {}
+    outputs["mel_ica"] = inputs["mel_ica"]
+    return outputs
+
+
+def mel_ica_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("mel_ica")
+
+
+@shell.define
+class FeatureExtractor(shell.Task["FeatureExtractor.Outputs"]):
+    
""" + Examples + ------- + + >>> from fileformats.generic import Directory + >>> from pydra.tasks.fsl.v6.fix.feature_extractor import FeatureExtractor + + """ + + executable = "fix -f" + mel_ica: ty.Any = shell.arg( + help="Melodic output directory or directories", argstr="{mel_ica}", position=-1 + ) + + class Outputs(shell.Outputs): + mel_ica: Directory | None = shell.out( + help="Melodic output directory or directories", callable=mel_ica_callable + ) diff --git a/pydra/tasks/fsl/v6/fix/tests/conftest.py b/pydra/tasks/fsl/v6/fix/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/fix/tests/test_accuracytester.py b/pydra/tasks/fsl/v6/fix/tests/test_accuracytester.py new file mode 100644 index 0000000..12d3213 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_accuracytester.py @@ -0,0 +1,18 @@ +from fileformats.generic import Directory, File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.accuracy_tester import AccuracyTester +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def 
test_accuracytester_1(): + task = AccuracyTester() + task.mel_icas = [Directory.sample(seed=0)] + task.trained_wts_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/tests/test_classifier.py b/pydra/tasks/fsl/v6/fix/tests/test_classifier.py new file mode 100644 index 0000000..0eef41d --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_classifier.py @@ -0,0 +1,18 @@ +from fileformats.generic import Directory, File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.classifier import Classifier +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_classifier_1(): + task = Classifier() + task.mel_ica = Directory.sample(seed=0) + task.trained_wts_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/tests/test_cleaner.py b/pydra/tasks/fsl/v6/fix/tests/test_cleaner.py new file mode 100644 index 0000000..cb880b9 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_cleaner.py @@ -0,0 +1,21 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.cleaner import Cleaner +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_cleaner_1(): + task = Cleaner() + task.artifacts_list_file = File.sample(seed=0) + task.highpass = 100 + task.confound_file = File.sample(seed=4) + task.confound_file_1 = File.sample(seed=5) + task.confound_file_2 = File.sample(seed=6) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/tests/test_featureextractor.py b/pydra/tasks/fsl/v6/fix/tests/test_featureextractor.py new file mode 100644 index 0000000..9526f50 --- 
/dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_featureextractor.py @@ -0,0 +1,15 @@ +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.feature_extractor import FeatureExtractor +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_featureextractor_1(): + task = FeatureExtractor() + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/tests/test_training.py b/pydra/tasks/fsl/v6/fix/tests/test_training.py new file mode 100644 index 0000000..f76a8ac --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_training.py @@ -0,0 +1,17 @@ +from fileformats.generic import Directory +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.training import Training +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_training_1(): + task = Training() + task.mel_icas = [Directory.sample(seed=0)] + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/tests/test_trainingsetcreator.py b/pydra/tasks/fsl/v6/fix/tests/test_trainingsetcreator.py new file mode 100644 index 0000000..8aabb1e --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/tests/test_trainingsetcreator.py @@ -0,0 +1,16 @@ +from fileformats.generic import Directory +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.fix.training_set_creator import TrainingSetCreator +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_trainingsetcreator_1(): + task = TrainingSetCreator() + task.mel_icas_in = [Directory.sample(seed=0)] + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/fix/training.py b/pydra/tasks/fsl/v6/fix/training.py new file mode 100644 index 
0000000..4729999 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/training.py @@ -0,0 +1,58 @@ +import attrs +from fileformats.generic import Directory, File +import logging +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["trained_wts_filestem"] is not attrs.NOTHING: + outputs["trained_wts_file"] = os.path.abspath( + inputs["trained_wts_filestem"] + ".RData" + ) + else: + outputs["trained_wts_file"] = os.path.abspath("trained_wts_file.RData") + return outputs + + +def trained_wts_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("trained_wts_file") + + +@shell.define +class Training(shell.Task["Training.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from pydra.tasks.fsl.v6.fix.training import Training + + """ + + executable = "fix -t" + mel_icas: list[Directory] = shell.arg( + help="Melodic output directories", argstr="{mel_icas}", position=-1 + ) + trained_wts_filestem: str = shell.arg( + help="trained-weights filestem, used for trained_wts_file and output directories", + argstr="{trained_wts_filestem}", + position=1, + ) + loo: bool = shell.arg( + help="full leave-one-out test with classifier training", argstr="-l", position=2 + ) + + class Outputs(shell.Outputs): + trained_wts_file: File | None = shell.out( + help="Trained-weights file", callable=trained_wts_file_callable + ) diff --git a/pydra/tasks/fsl/v6/fix/training_set_creator.py b/pydra/tasks/fsl/v6/fix/training_set_creator.py new file mode 100644 index 0000000..43fef28 --- /dev/null +++ b/pydra/tasks/fsl/v6/fix/training_set_creator.py @@ -0,0 +1,50 @@ +import attrs +from fileformats.generic import Directory +import logging +import os +from pydra.compose import 
python + + +logger = logging.getLogger(__name__) + + +@python.define +class TrainingSetCreator(python.Task["TrainingSetCreator.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory + >>> from pydra.tasks.fsl.v6.fix.training_set_creator import TrainingSetCreator + + """ + + mel_icas_in: list[Directory] + + class Outputs(python.Outputs): + mel_icas_out: list[Directory] + + @staticmethod + def function(mel_icas_in: list[Directory]) -> list[Directory]: + mel_icas_out = attrs.NOTHING + self_dict = {} + mel_icas = [ + item + for item in mel_icas_in + if os.path.exists(os.path.join(item, "hand_labels_noise.txt")) + ] + if len(mel_icas) == 0: + raise Exception( + "%s did not find any hand_labels_noise.txt files in the following directories: %s" + % (self_dict["__class__"].__name__, mel_icas) + ) + + mel_icas = [ + item + for item in mel_icas_in + if os.path.exists(os.path.join(item, "hand_labels_noise.txt")) + ] + outputs = {} + mel_icas_out = mel_icas + + return mel_icas_out diff --git a/pydra/tasks/fsl/v6/maths/__init__.py b/pydra/tasks/fsl/v6/maths/__init__.py new file mode 100644 index 0000000..e18ff6a --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/__init__.py @@ -0,0 +1,20 @@ +from .apply_mask import ApplyMask +from .ar1_image import AR1Image +from .binary_maths import BinaryMaths +from .change_data_type import ChangeDataType +from .dilate_image import DilateImage +from .erode_image import ErodeImage +from .isotropic_smooth import IsotropicSmooth +from .maths_command import MathsCommand +from .max_image import MaxImage +from .maxn_image import MaxnImage +from .mean_image import MeanImage +from .median_image import MedianImage +from .min_image import MinImage +from .multi_image_maths import MultiImageMaths +from .percentile_image import PercentileImage +from .spatial_filter import SpatialFilter +from .std_image import StdImage +from .temporal_filter import TemporalFilter +from .threshold import Threshold +from .unary_maths import 
UnaryMaths diff --git a/pydra/tasks/fsl/v6/maths/apply_mask.py b/pydra/tasks/fsl/v6/maths/apply_mask.py new file mode 100644 index 0000000..2154d2b --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/apply_mask.py @@ -0,0 +1,130 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class ApplyMask(shell.Task["ApplyMask.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.apply_mask import ApplyMask + + """ + + executable = "fslmaths" + mask_file: File = shell.arg( + help="binary image defining mask space", argstr="-mas {mask_file}", position=4 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None 
+): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/ar1_image.py b/pydra/tasks/fsl/v6/maths/ar1_image.py new file mode 100644 index 0000000..920b02b --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/ar1_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def 
_gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class AR1Image(shell.Task["AR1Image.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.ar1_image import AR1Image + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to find AR(1) coefficient across", + argstr="-{dimension}ar1", + position=4, + default="T", + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/binary_maths.py b/pydra/tasks/fsl/v6/maths/binary_maths.py new file mode 100644 index 0000000..653c290 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/binary_maths.py @@ -0,0 +1,138 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["operand_file", "operand_value"]]) +class BinaryMaths(shell.Task["BinaryMaths.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import 
Path + >>> from pydra.tasks.fsl.v6.maths.binary_maths import BinaryMaths + + """ + + executable = "fslmaths" + operation: ty.Any = shell.arg( + help="operation to perform", argstr="-{operation}", position=4 + ) + operand_file: File | None = shell.arg( + help="second image to perform operation with", + argstr="{operand_file}", + position=5, + ) + operand_value: float | None = shell.arg( + help="value to perform operation with", argstr="{operand_value:.8}", position=5 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/change_data_type.py b/pydra/tasks/fsl/v6/maths/change_data_type.py new file mode 100644 index 0000000..6c27c6f --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/change_data_type.py @@ -0,0 +1,125 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class ChangeDataType(shell.Task["ChangeDataType.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.change_data_type import ChangeDataType + + """ + + executable = "fslmaths" + output_datatype: ty.Any = shell.arg( + help="output data type", argstr="-odt {output_datatype}", position=-1 + ) + in_file: File = 
shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/dilate_image.py b/pydra/tasks/fsl/v6/maths/dilate_image.py new file mode 100644 index 0000000..8a9dfa6 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/dilate_image.py @@ -0,0 +1,157 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "operation": + return argstr.format(**{name: dict(mean="M", modal="D", max="F")[value]}) + + return argstr.format(**inputs) + + +def operation_formatter(field, inputs): + return _format_arg("operation", field, inputs, argstr="-dil{operation}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["kernel_file", "kernel_size"]]) +class 
DilateImage(shell.Task["DilateImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.dilate_image import DilateImage + + """ + + executable = "fslmaths" + operation: ty.Any = shell.arg( + help="filtering operation to perform in dilation", + position=6, + formatter="operation_formatter", + ) + kernel_shape: ty.Any = shell.arg( + help="kernel shape to use", argstr="-kernel {kernel_shape}", position=4 + ) + kernel_size: float | None = shell.arg( + help="kernel size - voxels for box/boxv, mm for sphere, mm sigma for gauss", + argstr="{kernel_size:.4}", + position=5, + ) + kernel_file: File | None = shell.arg( + help="use external file for kernel", argstr="{kernel_file}", position=5 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. 
(defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/erode_image.py b/pydra/tasks/fsl/v6/maths/erode_image.py new file mode 100644 index 0000000..8907302 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/erode_image.py @@ -0,0 +1,160 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "minimum_filter": + if value: + return "-eroF" + return "-ero" + + return argstr.format(**inputs) + + +def minimum_filter_formatter(field, inputs): + return _format_arg("minimum_filter", field, inputs, argstr="{minimum_filter:d}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return 
_list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["kernel_file", "kernel_size"]]) +class ErodeImage(shell.Task["ErodeImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.erode_image import ErodeImage + + """ + + executable = "fslmaths" + minimum_filter: bool = shell.arg( + help="if true, minimum filter rather than erosion by zeroing-out", + position=6, + formatter="minimum_filter_formatter", + default=False, + ) + kernel_shape: ty.Any = shell.arg( + help="kernel shape to use", argstr="-kernel {kernel_shape}", position=4 + ) + kernel_size: float | None = shell.arg( + help="kernel size - voxels for box/boxv, mm for sphere, mm sigma for gauss", + argstr="{kernel_size:.4}", + position=5, + ) + kernel_file: File | None = shell.arg( + help="use external file for kernel", argstr="{kernel_file}", position=5 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. 
+ + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/isotropic_smooth.py b/pydra/tasks/fsl/v6/maths/isotropic_smooth.py new file mode 100644 index 0000000..35a20a5 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/isotropic_smooth.py @@ -0,0 +1,149 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import numpy as np +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, 
value, inputs, argstr): + if value is None: + return "" + + if name == "fwhm": + sigma = float(value) / np.sqrt(8 * np.log(2)) + return argstr.format(**{name: sigma}) + + return argstr.format(**inputs) + + +def fwhm_formatter(field, inputs): + return _format_arg("fwhm", field, inputs, argstr="-s {fwhm:.5}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["fwhm", "sigma"]]) +class IsotropicSmooth(shell.Task["IsotropicSmooth.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.isotropic_smooth import IsotropicSmooth + + """ + + executable = "fslmaths" + fwhm: float | None = shell.arg( + help="fwhm of smoothing kernel [mm]", position=4, formatter="fwhm_formatter" + ) + sigma: float | None = shell.arg( + help="sigma of smoothing kernel [mm]", argstr="-s {sigma:.5}", position=4 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given 
parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/maths_command.py b/pydra/tasks/fsl/v6/maths/maths_command.py new file mode 100644 index 0000000..8d2450a --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/maths_command.py @@ -0,0 +1,127 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name 
== "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MathsCommand(shell.Task["MathsCommand.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.maths_command import MathsCommand + + """ + + executable = "fslmaths" + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. 
+ + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/max_image.py b/pydra/tasks/fsl/v6/maths/max_image.py new file mode 100644 index 0000000..1aafd4a --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/max_image.py @@ -0,0 +1,139 @@ +import attrs +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MaxImage(shell.Task["MaxImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.max_image import MaxImage + + >>> task = MaxImage() + >>> task.in_file = Nifti1.mock("functional.nii" # 
doctest: +SKIP) + >>> task.cmdline + 'None' + + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to max across", + argstr="-{dimension}max", + position=4, + default="T", + ) + in_file: Nifti1 = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/maxn_image.py b/pydra/tasks/fsl/v6/maths/maxn_image.py new file mode 100644 index 0000000..0cc106e --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/maxn_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MaxnImage(shell.Task["MaxnImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.maxn_image import MaxnImage + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to index max across", + argstr="-{dimension}maxn", + position=4, + default="T", + ) + in_file: File = shell.arg( + 
help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/mean_image.py b/pydra/tasks/fsl/v6/maths/mean_image.py new file mode 100644 index 0000000..76e131f --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/mean_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MeanImage(shell.Task["MeanImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.mean_image import MeanImage + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to mean across", + argstr="-{dimension}mean", + position=4, + default="T", + ) + in_file: File = shell.arg( + 
help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/median_image.py b/pydra/tasks/fsl/v6/maths/median_image.py new file mode 100644 index 0000000..18e3c54 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/median_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MedianImage(shell.Task["MedianImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.median_image import MedianImage + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to median across", + argstr="-{dimension}median", + position=4, + default="T", + ) + in_file: File = 
shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/min_image.py b/pydra/tasks/fsl/v6/maths/min_image.py new file mode 100644 index 0000000..97439dd --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/min_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MinImage(shell.Task["MinImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.min_image import MinImage + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to min across", + argstr="-{dimension}min", + position=4, + default="T", + ) + in_file: File = shell.arg( + help="image to 
operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/multi_image_maths.py b/pydra/tasks/fsl/v6/maths/multi_image_maths.py new file mode 100644 index 0000000..23ed667 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/multi_image_maths.py @@ -0,0 +1,156 @@ +import attrs +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "op_string": + return value % tuple(inputs["operand_files"]) + + return argstr.format(**inputs) + + +def op_string_formatter(field, inputs): + return _format_arg("op_string", field, inputs, argstr="{op_string}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MultiImageMaths(shell.Task["MultiImageMaths.Outputs"]): + """ + Examples + ------- + 
+ >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.multi_image_maths import MultiImageMaths + + >>> task = MultiImageMaths() + >>> task.operand_files = [Nifti1.mock("functional2.nii"), Nifti1.mock("functional3.nii")] + >>> task.in_file = Nifti1.mock("functional.nii") + >>> task.cmdline + 'None' + + + """ + + executable = "fslmaths" + op_string: ty.Any = shell.arg( + help="python formatted string of operations to perform", + position=4, + formatter="op_string_formatter", + ) + operand_files: list[Nifti1] = shell.arg( + help="list of file names to plug into op string" + ) + in_file: Nifti1 = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/percentile_image.py b/pydra/tasks/fsl/v6/maths/percentile_image.py new file mode 100644 index 0000000..03684b1 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/percentile_image.py @@ -0,0 +1,144 @@ +import attrs +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class PercentileImage(shell.Task["PercentileImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> 
from pydra.tasks.fsl.v6.maths.percentile_image import PercentileImage + + >>> task = PercentileImage() + >>> task.in_file = Nifti1.mock("functional.nii" # doctest: +SKIP) + >>> task.cmdline + 'None' + + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to percentile across", + argstr="-{dimension}perc", + position=4, + default="T", + ) + perc: ty.Any = shell.arg( + help="nth percentile (0-100) of FULL RANGE across dimension", + argstr="{perc}", + position=5, + ) + in_file: Nifti1 = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. 
" % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/spatial_filter.py b/pydra/tasks/fsl/v6/maths/spatial_filter.py new file mode 100644 index 0000000..c9867c1 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/spatial_filter.py @@ -0,0 +1,141 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["kernel_file", "kernel_size"]]) +class SpatialFilter(shell.Task["SpatialFilter.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.spatial_filter import SpatialFilter + + """ + + executable = "fslmaths" + operation: ty.Any = shell.arg( + help="operation to 
filter with", argstr="-f{operation}", position=6 + ) + kernel_shape: ty.Any = shell.arg( + help="kernel shape to use", argstr="-kernel {kernel_shape}", position=4 + ) + kernel_size: float | None = shell.arg( + help="kernel size - voxels for box/boxv, mm for sphere, mm sigma for gauss", + argstr="{kernel_size:.4}", + position=5, + ) + kernel_file: File | None = shell.arg( + help="use external file for kernel", argstr="{kernel_file}", position=5 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/std_image.py b/pydra/tasks/fsl/v6/maths/std_image.py new file mode 100644 index 0000000..9d71fec --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/std_image.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class StdImage(shell.Task["StdImage.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.std_image import StdImage + + """ + + executable = "fslmaths" + dimension: ty.Any = shell.arg( + help="dimension to standard deviate across", + argstr="-{dimension}std", + position=4, + default="T", + ) + in_file: File = shell.arg( + 
help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/temporal_filter.py b/pydra/tasks/fsl/v6/maths/temporal_filter.py new file mode 100644 index 0000000..7477c21 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/temporal_filter.py @@ -0,0 +1,139 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class TemporalFilter(shell.Task["TemporalFilter.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.temporal_filter import TemporalFilter + + """ + + executable = "fslmaths" + lowpass_sigma: float = shell.arg( + help="lowpass filter sigma (in volumes)", + argstr="{lowpass_sigma:.6}", + position=5, + 
default=-1, + ) + highpass_sigma: float = shell.arg( + help="highpass filter sigma (in volumes)", + argstr="-bptf {highpass_sigma:.6}", + position=4, + default=-1, + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/tests/conftest.py b/pydra/tasks/fsl/v6/maths/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/maths/tests/test_applymask.py b/pydra/tasks/fsl/v6/maths/tests/test_applymask.py new file mode 100644 index 0000000..1351dd5 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_applymask.py @@ -0,0 +1,18 @@ +from 
fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.apply_mask import ApplyMask +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_applymask_1(): + task = ApplyMask() + task.mask_file = File.sample(seed=0) + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_ar1image.py b/pydra/tasks/fsl/v6/maths/tests/test_ar1image.py new file mode 100644 index 0000000..6d4db93 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_ar1image.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.ar1_image import AR1Image +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_ar1image_1(): + task = AR1Image() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_binarymaths.py b/pydra/tasks/fsl/v6/maths/tests/test_binarymaths.py new file mode 100644 index 0000000..f478344 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_binarymaths.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.binary_maths import BinaryMaths +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_binarymaths_1(): + task = BinaryMaths() + task.operand_file = File.sample(seed=1) + task.in_file = File.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_changedatatype.py 
b/pydra/tasks/fsl/v6/maths/tests/test_changedatatype.py new file mode 100644 index 0000000..cd5546e --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_changedatatype.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.change_data_type import ChangeDataType +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_changedatatype_1(): + task = ChangeDataType() + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_dilateimage.py b/pydra/tasks/fsl/v6/maths/tests/test_dilateimage.py new file mode 100644 index 0000000..21bea34 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_dilateimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.dilate_image import DilateImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_dilateimage_1(): + task = DilateImage() + task.kernel_file = File.sample(seed=3) + task.in_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_erodeimage.py b/pydra/tasks/fsl/v6/maths/tests/test_erodeimage.py new file mode 100644 index 0000000..02b6b1a --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_erodeimage.py @@ -0,0 +1,19 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.erode_image import ErodeImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_erodeimage_1(): + task = ErodeImage() + task.minimum_filter = False + task.kernel_file = 
File.sample(seed=3) + task.in_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_isotropicsmooth.py b/pydra/tasks/fsl/v6/maths/tests/test_isotropicsmooth.py new file mode 100644 index 0000000..36c9e96 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_isotropicsmooth.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.isotropic_smooth import IsotropicSmooth +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_isotropicsmooth_1(): + task = IsotropicSmooth() + task.in_file = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_mathscommand.py b/pydra/tasks/fsl/v6/maths/tests/test_mathscommand.py new file mode 100644 index 0000000..dce7626 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_mathscommand.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.maths_command import MathsCommand +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_mathscommand_1(): + task = MathsCommand() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_maximage.py b/pydra/tasks/fsl/v6/maths/tests/test_maximage.py new file mode 100644 index 0000000..191dbb6 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_maximage.py @@ -0,0 +1,27 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.max_image 
import MaxImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_maximage_1(): + task = MaxImage() + task.dimension = "T" + task.in_file = Nifti1.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_maximage_2(): + task = MaxImage() + task.in_file = Nifti1.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_maxnimage.py b/pydra/tasks/fsl/v6/maths/tests/test_maxnimage.py new file mode 100644 index 0000000..103dd67 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_maxnimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.maxn_image import MaxnImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_maxnimage_1(): + task = MaxnImage() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_meanimage.py b/pydra/tasks/fsl/v6/maths/tests/test_meanimage.py new file mode 100644 index 0000000..117aa88 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_meanimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.mean_image import MeanImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_meanimage_1(): + task = MeanImage() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git 
a/pydra/tasks/fsl/v6/maths/tests/test_medianimage.py b/pydra/tasks/fsl/v6/maths/tests/test_medianimage.py new file mode 100644 index 0000000..16450d9 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_medianimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.median_image import MedianImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_medianimage_1(): + task = MedianImage() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_minimage.py b/pydra/tasks/fsl/v6/maths/tests/test_minimage.py new file mode 100644 index 0000000..05147a1 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_minimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.min_image import MinImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_minimage_1(): + task = MinImage() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_multiimagemaths.py b/pydra/tasks/fsl/v6/maths/tests/test_multiimagemaths.py new file mode 100644 index 0000000..1e532c5 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_multiimagemaths.py @@ -0,0 +1,28 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.multi_image_maths import MultiImageMaths +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_multiimagemaths_1(): + task = MultiImageMaths() 
+ task.operand_files = [Nifti1.sample(seed=1)] + task.in_file = Nifti1.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_multiimagemaths_2(): + task = MultiImageMaths() + task.operand_files = [Nifti1.sample(seed=1)] + task.in_file = Nifti1.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_percentileimage.py b/pydra/tasks/fsl/v6/maths/tests/test_percentileimage.py new file mode 100644 index 0000000..b12438c --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_percentileimage.py @@ -0,0 +1,27 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.percentile_image import PercentileImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_percentileimage_1(): + task = PercentileImage() + task.dimension = "T" + task.in_file = Nifti1.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_percentileimage_2(): + task = PercentileImage() + task.in_file = Nifti1.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_spatialfilter.py b/pydra/tasks/fsl/v6/maths/tests/test_spatialfilter.py new file mode 100644 index 0000000..571ec7f --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_spatialfilter.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.spatial_filter import SpatialFilter +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def 
test_spatialfilter_1(): + task = SpatialFilter() + task.kernel_file = File.sample(seed=3) + task.in_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_stdimage.py b/pydra/tasks/fsl/v6/maths/tests/test_stdimage.py new file mode 100644 index 0000000..82a4cfa --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_stdimage.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.std_image import StdImage +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_stdimage_1(): + task = StdImage() + task.dimension = "T" + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_temporalfilter.py b/pydra/tasks/fsl/v6/maths/tests/test_temporalfilter.py new file mode 100644 index 0000000..dfeb8cc --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_temporalfilter.py @@ -0,0 +1,19 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.temporal_filter import TemporalFilter +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_temporalfilter_1(): + task = TemporalFilter() + task.lowpass_sigma = -1 + task.highpass_sigma = -1 + task.in_file = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_threshold.py b/pydra/tasks/fsl/v6/maths/tests/test_threshold.py new file mode 100644 index 0000000..89a6095 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_threshold.py @@ -0,0 +1,18 @@ +from fileformats.generic import File 
+import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.threshold import Threshold +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_threshold_1(): + task = Threshold() + task.direction = "below" + task.in_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/tests/test_unarymaths.py b/pydra/tasks/fsl/v6/maths/tests/test_unarymaths.py new file mode 100644 index 0000000..9ab5b51 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/tests/test_unarymaths.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.maths.unary_maths import UnaryMaths +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_unarymaths_1(): + task = UnaryMaths() + task.in_file = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/maths/threshold.py b/pydra/tasks/fsl/v6/maths/threshold.py new file mode 100644 index 0000000..6968443 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/threshold.py @@ -0,0 +1,166 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + self_dict = {} + + if name == "thresh": + arg = "-" + _si = self_dict["inputs"] + if inputs["direction"] == "above": + arg += "u" + arg += "thr" + if (_si.use_robust_range is not attrs.NOTHING) and _si.use_robust_range: + if (_si.use_nonzero_voxels is not attrs.NOTHING) and 
_si.use_nonzero_voxels: + arg += "P" + else: + arg += "p" + arg += " %.10f" % value + return arg + + return argstr.format(**inputs) + + +def thresh_formatter(field, inputs): + return _format_arg("thresh", field, inputs, argstr="{thresh}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class Threshold(shell.Task["Threshold.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.threshold import Threshold + + """ + + executable = "fslmaths" + thresh: float = shell.arg( + help="threshold value", position=4, formatter="thresh_formatter" + ) + direction: ty.Any = shell.arg( + help="zero-out either below or above thresh value", default="below" + ) + use_robust_range: bool = shell.arg( + help="interpret thresh as percentage (0-100) of robust range" + ) + use_nonzero_voxels: bool = shell.arg( + help="use nonzero voxels to calculate robust range", + requires=["use_robust_range"], + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, 
change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/maths/unary_maths.py b/pydra/tasks/fsl/v6/maths/unary_maths.py new file mode 100644 index 0000000..897b412 --- /dev/null +++ b/pydra/tasks/fsl/v6/maths/unary_maths.py @@ -0,0 +1,139 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + 
+logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + operation=inputs["operation"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class UnaryMaths(shell.Task["UnaryMaths.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.maths.unary_maths import UnaryMaths + + """ + + executable = "fslmaths" + operation: ty.Any = shell.arg( + help="operation to perform", argstr="-{operation}", position=4 + ) + in_file: File = shell.arg( + help="image to operate on", argstr="{in_file}", position=2 + ) + internal_datatype: ty.Any = shell.arg( + help="datatype to use for calculations (default is float)", + argstr="-dt {internal_datatype}", + position=1, + ) + output_datatype: ty.Any = shell.arg( + help="datatype to use for output (default uses input type)", + argstr="-odt {output_datatype}", + position=-1, + ) + nan2zeros: bool = shell.arg( + help="change NaNs to zeros before doing anything", argstr="-nan", position=3 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", + argstr="{out_file}", + position=-2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. 
(defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, operation=None, out_file=None, output_type=None): + _suffix = attrs.NOTHING + self_dict = {} + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix=self_dict["_suffix"], output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs, _suffix + + +def _list_outputs(in_file=None, operation=None, out_file=None, output_type=None): + _suffix = attrs.NOTHING + self_dict = {} + self_dict["_suffix"] = "_" + operation + return _list_outputs(), _suffix + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/__init__.py b/pydra/tasks/fsl/v6/model/__init__.py new file mode 100644 index 0000000..7c9c13a --- /dev/null +++ b/pydra/tasks/fsl/v6/model/__init__.py @@ -0,0 +1,15 @@ +from .cluster import Cluster +from .contrast_mgr import ContrastMgr +from .dual_regression import DualRegression +from .feat import FEAT +from .feat_model import FEATModel +from .filmgls import FILMGLS +from .flameo import FLAMEO +from .glm import GLM +from .l2_model import L2Model +from .level_1_design import Level1Design +from .melodic import MELODIC +from .multiple_regress_design import MultipleRegressDesign +from .randomise import Randomise +from .smm import 
SMM +from .smooth_estimate import SmoothEstimate diff --git a/pydra/tasks/fsl/v6/model/cluster.py b/pydra/tasks/fsl/v6/model/cluster.py new file mode 100644 index 0000000..4766a82 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/cluster.py @@ -0,0 +1,303 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name in list(parsed_inputs["filemap"].keys()): + if isinstance(value, bool): + fname = _list_outputs( + in_file=inputs["in_file"], output_type=inputs["output_type"] + )[name[4:]] + else: + fname = value + return argstr.format(**{name: fname}) + + return argstr.format(**inputs) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + self_dict = {} + + outputs = {} + for key, suffix in list(parsed_inputs["filemap"].items()): + outkey = key[4:] + inval = getattr(self_dict["inputs"], key) + if inval is not attrs.NOTHING: + if isinstance(inval, bool): + if inval: + change_ext = True + if suffix.endswith(".txt"): + change_ext = False + outputs[outkey] = _gen_fname( + inputs["in_file"], + suffix="_" + suffix, + change_ext=change_ext, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + outputs[outkey] = os.path.abspath(inval) + return outputs + + +def index_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("index_file") + + +def threshold_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return 
outputs.get("threshold_file") + + +def localmax_txt_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("localmax_txt_file") + + +def localmax_vol_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("localmax_vol_file") + + +def size_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("size_file") + + +def max_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("max_file") + + +def mean_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_file") + + +def pval_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("pval_file") + + +@shell.define +class Cluster(shell.Task["Cluster.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.cluster import Cluster + + >>> task = Cluster() + >>> task.in_file = File.mock() + >>> task.threshold = 2.3 + >>> task.out_localmax_txt_file = "stats.txt" + >>> task.cope_file = File.mock() + >>> task.xfm_file = File.mock() + >>> task.std_space_file = File.mock() + >>> task.warpfield_file = File.mock() + >>> task.cmdline + 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' + + + """ + + executable = "cluster" + in_file: File = shell.arg(help="input volume", argstr="--in={in_file}") + threshold: float = shell.arg( + help="threshold for input 
volume", argstr="--thresh={threshold:.10}" + ) + out_index_file: ty.Any = shell.arg( + help="output of cluster index (in size order)", + argstr="--oindex={out_index_file}", + ) + out_threshold_file: ty.Any = shell.arg( + help="thresholded image", argstr="--othresh={out_threshold_file}" + ) + out_localmax_txt_file: ty.Any = shell.arg( + help="local maxima text file", argstr="--olmax={out_localmax_txt_file}" + ) + out_localmax_vol_file: ty.Any = shell.arg( + help="output of local maxima volume", argstr="--olmaxim={out_localmax_vol_file}" + ) + out_size_file: ty.Any = shell.arg( + help="filename for output of size image", argstr="--osize={out_size_file}" + ) + out_max_file: ty.Any = shell.arg( + help="filename for output of max image", argstr="--omax={out_max_file}" + ) + out_mean_file: ty.Any = shell.arg( + help="filename for output of mean image", argstr="--omean={out_mean_file}" + ) + out_pval_file: ty.Any = shell.arg( + help="filename for image output of log pvals", argstr="--opvals={out_pval_file}" + ) + pthreshold: float = shell.arg( + help="p-threshold for clusters", + argstr="--pthresh={pthreshold:.10}", + requires=["dlh", "volume"], + ) + peak_distance: float = shell.arg( + help="minimum distance between local maxima/minima, in mm (default 0)", + argstr="--peakdist={peak_distance:.10}", + ) + cope_file: File = shell.arg(help="cope volume", argstr="--cope={cope_file}") + volume: int = shell.arg( + help="number of voxels in the mask", argstr="--volume={volume}" + ) + dlh: float = shell.arg( + help="smoothness estimate = sqrt(det(Lambda))", argstr="--dlh={dlh:.10}" + ) + fractional: bool = shell.arg( + help="interprets the threshold as a fraction of the robust range", + argstr="--fractional", + default=False, + ) + connectivity: int = shell.arg( + help="the connectivity of voxels (default 26)", + argstr="--connectivity={connectivity}", + ) + use_mm: bool = shell.arg( + help="use mm, not voxel, coordinates", argstr="--mm", default=False + ) + find_min: bool = 
shell.arg( + help="find minima instead of maxima", argstr="--min", default=False + ) + no_table: bool = shell.arg( + help="suppresses printing of the table info", argstr="--no_table", default=False + ) + minclustersize: bool = shell.arg( + help="prints out minimum significant cluster size", + argstr="--minclustersize", + default=False, + ) + xfm_file: File = shell.arg( + help="filename for Linear: input->standard-space transform. Non-linear: input->highres transform", + argstr="--xfm={xfm_file}", + ) + std_space_file: File = shell.arg( + help="filename for standard-space volume", argstr="--stdvol={std_space_file}" + ) + num_maxima: int = shell.arg( + help="no of local maxima to report", argstr="--num={num_maxima}" + ) + warpfield_file: File = shell.arg( + help="file containing warpfield", argstr="--warpvol={warpfield_file}" + ) + + class Outputs(shell.Outputs): + index_file: File | None = shell.out( + help="output of cluster index (in size order)", callable=index_file_callable + ) + threshold_file: File | None = shell.out( + help="thresholded image", callable=threshold_file_callable + ) + localmax_txt_file: File | None = shell.out( + help="local maxima text file", callable=localmax_txt_file_callable + ) + localmax_vol_file: File | None = shell.out( + help="output of local maxima volume", callable=localmax_vol_file_callable + ) + size_file: File | None = shell.out( + help="filename for output of size image", callable=size_file_callable + ) + max_file: File | None = shell.out( + help="filename for output of max image", callable=max_file_callable + ) + mean_file: File | None = shell.out( + help="filename for output of mean image", callable=mean_file_callable + ) + pval_file: File | None = shell.out( + help="filename for image output of log pvals", callable=pval_file_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate 
a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "cluster" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/contrast_mgr.py b/pydra/tasks/fsl/v6/model/contrast_mgr.py new file mode 100644 index 0000000..998abfe --- /dev/null +++ b/pydra/tasks/fsl/v6/model/contrast_mgr.py @@ -0,0 +1,332 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name in ["param_estimates", "corrections", "dof_file"]: + return "" + elif name in ["sigmasquareds"]: + path, _ = os.path.split(value) + return path + else: + pass + + return argstr.format(**inputs) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + 
pth, _ = os.path.split(inputs["sigmasquareds"]) + numtcons, numfcons = _get_numcons( + fcon_file=inputs["fcon_file"], + tcon_file=inputs["tcon_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + base_contrast = 1 + if inputs["contrast_num"] is not attrs.NOTHING: + base_contrast = inputs["contrast_num"] + copes = [] + varcopes = [] + zstats = [] + tstats = [] + neffs = [] + for i in range(numtcons): + copes.append( + _gen_fname( + "cope%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + varcopes.append( + _gen_fname( + "varcope%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + zstats.append( + _gen_fname( + "zstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + tstats.append( + _gen_fname( + "tstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + neffs.append( + _gen_fname( + "neff%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + if copes: + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + outputs["tstats"] = tstats + outputs["neffs"] = neffs + fstats = [] + zfstats = [] + for i in range(numfcons): + fstats.append( + _gen_fname( + "fstat%d.nii" % (base_contrast + i), + cwd=pth, + 
output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + zfstats.append( + _gen_fname( + "zfstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + if fstats: + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats + return outputs + + +def copes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("copes") + + +def varcopes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("varcopes") + + +def zstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("zstats") + + +def tstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tstats") + + +def fstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fstats") + + +def zfstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("zfstats") + + +def neffs_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("neffs") + + +@shell.define +class ContrastMgr(shell.Task["ContrastMgr.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> 
from pydra.tasks.fsl.v6.model.contrast_mgr import ContrastMgr + + """ + + executable = "contrast_mgr" + tcon_file: File = shell.arg( + help="contrast file containing T-contrasts", argstr="{tcon_file}", position=-1 + ) + fcon_file: File = shell.arg( + help="contrast file containing F-contrasts", argstr="-f {fcon_file}" + ) + param_estimates: list[File] = shell.arg( + help="Parameter estimates for each column of the design matrix", argstr="" + ) + corrections: File = shell.arg( + help="statistical corrections used within FILM modelling" + ) + dof_file: File = shell.arg(help="degrees of freedom", argstr="") + sigmasquareds: File = shell.arg( + help="summary of residuals, See Woolrich, et. al., 2001", argstr="", position=-2 + ) + contrast_num: ty.Any = shell.arg( + help="contrast number to start labeling copes from", argstr="-cope" + ) + suffix: str = shell.arg( + help="suffix to put on the end of the cope filename before the contrast number, default is nothing", + argstr="-suffix {suffix}", + ) + + class Outputs(shell.Outputs): + copes: list[File] | None = shell.out( + help="Contrast estimates for each contrast", callable=copes_callable + ) + varcopes: list[File] | None = shell.out( + help="Variance estimates for each contrast", callable=varcopes_callable + ) + zstats: list[File] | None = shell.out( + help="z-stat file for each contrast", callable=zstats_callable + ) + tstats: list[File] | None = shell.out( + help="t-stat file for each contrast", callable=tstats_callable + ) + fstats: list[File] | None = shell.out( + help="f-stat file for each contrast", callable=fstats_callable + ) + zfstats: list[File] | None = shell.out( + help="z-stat file for each F contrast", callable=zfstats_callable + ) + neffs: list[File] | None = shell.out( + help="neff file ?? 
for each contrast", callable=neffs_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "contrast_mgr" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _get_numcons( + fcon_file=None, + tcon_file=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + numtcons = 0 + numfcons = 0 + if tcon_file is not attrs.NOTHING: + with open(tcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numtcons = int(line.split()[-1]) + break + if fcon_file is not attrs.NOTHING: + with open(fcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numfcons = int(line.split()[-1]) + break + return numtcons, numfcons + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/dual_regression.py b/pydra/tasks/fsl/v6/model/dual_regression.py new file mode 100644 index 
0000000..b70b6ef --- /dev/null +++ b/pydra/tasks/fsl/v6/model/dual_regression.py @@ -0,0 +1,89 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_dir": + return os.getcwd() + + +def out_dir_default(inputs): + return _gen_filename("out_dir", inputs=inputs) + + +@shell.define(xor=[["one_sample_group_mean", "design_file"]]) +class DualRegression(shell.Task["DualRegression.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pydra.tasks.fsl.v6.model.dual_regression import DualRegression + + >>> task = DualRegression() + >>> task.in_files = [Nifti1.mock("functional.nii"), Nifti1.mock("functional2.nii"), Nifti1.mock("functional3.nii")] + >>> task.group_IC_maps_4D = File.mock() + >>> task.des_norm = False + >>> task.design_file = File.mock() + >>> task.con_file = File.mock() + >>> task.n_perm = 10 + >>> task.cmdline + 'dual_regression allFA.nii 0 -1 10 my_output_directory functional.nii functional2.nii functional3.nii' + + + """ + + executable = "dual_regression" + in_files: list[Nifti1] = shell.arg( + help="List all subjects' preprocessed, standard-space 4D datasets", + argstr="{in_files}", + position=-1, + sep=" ", + ) + group_IC_maps_4D: File = shell.arg( + help="4D image containing spatial IC maps (melodic_IC) from the whole-group ICA analysis", + argstr="{group_IC_maps_4D}", + position=1, + ) + des_norm: bool = shell.arg( + help="Whether to variance-normalise the timecourses used as the stage-2 regressors; True is default and recommended", + argstr="{des_norm:d}", + position=2, + default=True, + ) + one_sample_group_mean: bool = shell.arg( + help="perform 1-sample group-mean test instead of generic permutation test", + argstr="-1", + position=3, + ) + design_file: File | 
None = shell.arg( + help="Design matrix for final cross-subject modelling with randomise", + argstr="{design_file}", + position=3, + ) + con_file: File = shell.arg( + help="Design contrasts for final cross-subject modelling with randomise", + argstr="{con_file}", + position=4, + ) + n_perm: int = shell.arg( + help="Number of permutations for randomise; set to 1 for just raw tstat output, set to 0 to not run randomise at all.", + argstr="{n_perm}", + position=5, + ) + + class Outputs(shell.Outputs): + out_dir: ty.Any = shell.outarg( + help="This directory will be created to hold all output and logfiles", + argstr="{out_dir}", + position=6, + path_template="out_dir", + ) diff --git a/pydra/tasks/fsl/v6/model/feat.py b/pydra/tasks/fsl/v6/model/feat.py new file mode 100644 index 0000000..d84c353 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/feat.py @@ -0,0 +1,65 @@ +import attrs +from fileformats.generic import Directory, File +from glob import glob +import logging +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + is_ica = False + outputs["feat_dir"] = None + with open(inputs["fsf_file"]) as fp: + text = fp.read() + if "set fmri(inmelodic) 1" in text: + is_ica = True + for line in text.split("\n"): + if line.find("set fmri(outputdir)") > -1: + try: + outputdir_spec = line.split('"')[-2] + if os.path.exists(outputdir_spec): + outputs["feat_dir"] = outputdir_spec + + except: + pass + if not outputs["feat_dir"]: + if is_ica: + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*ica"))[0] + else: + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*feat"))[0] + return outputs + + +def feat_dir_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("feat_dir") + + +@shell.define +class 
FEAT(shell.Task["FEAT.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from pydra.tasks.fsl.v6.model.feat import FEAT + + """ + + executable = "feat" + fsf_file: File = shell.arg( + help="File specifying the feat design spec file", + argstr="{fsf_file}", + position=1, + ) + + class Outputs(shell.Outputs): + feat_dir: Directory | None = shell.out(callable=feat_dir_callable) diff --git a/pydra/tasks/fsl/v6/model/feat_model.py b/pydra/tasks/fsl/v6/model/feat_model.py new file mode 100644 index 0000000..de62305 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/feat_model.py @@ -0,0 +1,147 @@ +import attrs +from fileformats.generic import File +from glob import glob +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import simplify_list +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "fsf_file": + pass + elif name == "ev_files": + return "" + else: + pass + + return argstr.format(**inputs) + + +def fsf_file_formatter(field, inputs): + return _format_arg("fsf_file", field, inputs, argstr="{fsf_file}") + + +def ev_files_formatter(field, inputs): + return _format_arg("ev_files", field, inputs, argstr="{ev_files}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + root = _get_design_root( + simplify_list(inputs["fsf_file"]), + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + design_file = glob(os.path.join(os.getcwd(), "%s*.mat" % root)) + assert len(design_file) == 1, "No mat file generated by FEAT Model" + outputs["design_file"] = design_file[0] + design_image = glob(os.path.join(os.getcwd(), "%s.png" % root)) + assert len(design_image) == 1, "No design image generated by FEAT Model" + outputs["design_image"] = 
design_image[0] + design_cov = glob(os.path.join(os.getcwd(), "%s_cov.png" % root)) + assert len(design_cov) == 1, "No covariance image generated by FEAT Model" + outputs["design_cov"] = design_cov[0] + con_file = glob(os.path.join(os.getcwd(), "%s*.con" % root)) + assert len(con_file) == 1, "No con file generated by FEAT Model" + outputs["con_file"] = con_file[0] + fcon_file = glob(os.path.join(os.getcwd(), "%s*.fts" % root)) + if fcon_file: + assert len(fcon_file) == 1, "No fts file generated by FEAT Model" + outputs["fcon_file"] = fcon_file[0] + return outputs + + +def design_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("design_file") + + +def design_image_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("design_image") + + +def design_cov_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("design_cov") + + +def con_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("con_file") + + +def fcon_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fcon_file") + + +@shell.define +class FEATModel(shell.Task["FEATModel.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.feat_model import FEATModel + + """ + + executable = "feat_model" + fsf_file: File = shell.arg( + help="File specifying the feat design spec file", + formatter="fsf_file_formatter", + position=1, + ) + ev_files: list[File] = shell.arg( + 
help="Event spec files generated by level1design", + formatter="ev_files_formatter", + position=2, + ) + + class Outputs(shell.Outputs): + design_file: File | None = shell.out( + help="Mat file containing ascii matrix for design", + callable=design_file_callable, + ) + design_image: File | None = shell.out( + help="Graphical representation of design matrix", + callable=design_image_callable, + ) + design_cov: File | None = shell.out( + help="Graphical representation of design covariance", + callable=design_cov_callable, + ) + con_file: File | None = shell.out( + help="Contrast file containing contrast vectors", callable=con_file_callable + ) + fcon_file: File | None = shell.out( + help="Contrast file containing contrast vectors", + callable=fcon_file_callable, + ) + + +def _get_design_root(infile, inputs=None, output_dir=None, stderr=None, stdout=None): + _, fname = os.path.split(infile) + return fname.split(".")[0] diff --git a/pydra/tasks/fsl/v6/model/filmgls.py b/pydra/tasks/fsl/v6/model/filmgls.py new file mode 100644 index 0000000..84050f7 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/filmgls.py @@ -0,0 +1,435 @@ +import attrs +from fileformats.generic import Directory, File +import logging +from looseversion import LooseVersion +from pydra.tasks.fsl.v6.base import Info +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + cwd = os.getcwd() + results_dir = os.path.join(cwd, inputs["results_dir"]) + outputs["results_dir"] = results_dir + pe_files = _get_pe_files( + results_dir, + design_file=inputs["design_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if pe_files: + 
outputs["param_estimates"] = pe_files + outputs["residual4d"] = _gen_fname( + "res4d.nii", + cwd=results_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["dof_file"] = os.path.join(results_dir, "dof") + outputs["sigmasquareds"] = _gen_fname( + "sigmasquareds.nii", + cwd=results_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["thresholdac"] = _gen_fname( + "threshac1.nii", + cwd=results_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if Info.version() and LooseVersion(Info.version()) < LooseVersion("5.0.7"): + outputs["corrections"] = _gen_fname( + "corrections.nii", + cwd=results_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["logfile"] = _gen_fname( + "logfile", + change_ext=False, + cwd=results_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): + pth = results_dir + numtcons, numfcons = _get_numcons( + fcon_file=inputs["fcon_file"], + tcon_file=inputs["tcon_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + base_contrast = 1 + copes = [] + varcopes = [] + zstats = [] + tstats = [] + for i in range(numtcons): + copes.append( + _gen_fname( + "cope%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + 
stdout=inputs["stdout"], + ) + ) + varcopes.append( + _gen_fname( + "varcope%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + zstats.append( + _gen_fname( + "zstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + tstats.append( + _gen_fname( + "tstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + if copes: + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + outputs["tstats"] = tstats + fstats = [] + zfstats = [] + for i in range(numfcons): + fstats.append( + _gen_fname( + "fstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + zfstats.append( + _gen_fname( + "zfstat%d.nii" % (base_contrast + i), + cwd=pth, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + if fstats: + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats + return outputs + + +def param_estimates_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("param_estimates") + + +def residual4d_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("residual4d") + + +def dof_file_callable(output_dir, inputs, stdout, 
stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dof_file") + + +def sigmasquareds_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("sigmasquareds") + + +def results_dir_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("results_dir") + + +def corrections_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("corrections") + + +def thresholdac_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("thresholdac") + + +def logfile_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("logfile") + + +@shell.define( + xor=[ + [ + "autocorr_estimate_only", + "autocorr_noestimate", + "fit_armodel", + "multitaper_product", + "tukey_window", + "use_pava", + ] + ] +) +class FILMGLS(shell.Task["FILMGLS.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from pydra.tasks.fsl.v6.model.filmgls import FILMGLS + + """ + + executable = "film_gls" + in_file: File = shell.arg(help="input data file", argstr="{in_file}", position=-3) + design_file: File = shell.arg( + help="design matrix file", argstr="{design_file}", position=-2 + ) + threshold: ty.Any = shell.arg( + help="threshold", argstr="{threshold}", position=-1, default=1000.0 + ) + smooth_autocorr: bool = shell.arg(help="Smooth auto corr estimates", argstr="-sa") + mask_size: int = shell.arg(help="susan mask size", argstr="-ms 
{mask_size}") + brightness_threshold: ty.Any = shell.arg( + help="susan brightness threshold, otherwise it is estimated", + argstr="-epith {brightness_threshold}", + ) + full_data: bool = shell.arg(help="output full data", argstr="-v") + autocorr_estimate_only: bool = shell.arg( + help="perform autocorrelation estimatation only", argstr="-ac" + ) + fit_armodel: bool = shell.arg( + help="fits autoregressive model - default is to use tukey with M=sqrt(numvols)", + argstr="-ar", + ) + tukey_window: int | None = shell.arg( + help="tukey window size to estimate autocorr", argstr="-tukey {tukey_window}" + ) + multitaper_product: int | None = shell.arg( + help="multitapering with slepian tapers and num is the time-bandwidth product", + argstr="-mt {multitaper_product}", + ) + use_pava: bool = shell.arg(help="estimates autocorr using PAVA", argstr="-pava") + autocorr_noestimate: bool = shell.arg( + help="do not estimate autocorrs", argstr="-noest" + ) + output_pwdata: bool = shell.arg( + help="output prewhitened data and average design matrix", + argstr="-output_pwdata", + ) + results_dir: ty.Any = shell.arg( + help="directory to store results in", + argstr="-rn {results_dir}", + default="results", + ) + + class Outputs(shell.Outputs): + param_estimates: list[File] | None = shell.out( + help="Parameter estimates for each column of the design matrix", + callable=param_estimates_callable, + ) + residual4d: File | None = shell.out( + help="Model fit residual mean-squared error for each time point", + callable=residual4d_callable, + ) + dof_file: File | None = shell.out( + help="degrees of freedom", callable=dof_file_callable + ) + sigmasquareds: File | None = shell.out( + help="summary of residuals, See Woolrich, et. 
al., 2001", + callable=sigmasquareds_callable, + ) + results_dir: Directory | None = shell.out( + help="directory storing model estimation output", + callable=results_dir_callable, + ) + corrections: File | None = shell.out( + help="statistical corrections used within FILM modeling", + callable=corrections_callable, + ) + thresholdac: File | None = shell.out( + help="The FILM autocorrelation parameters", callable=thresholdac_callable + ) + logfile: File | None = shell.out( + help="FILM run logfile", callable=logfile_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "film_gls" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _get_numcons( + fcon_file=None, + tcon_file=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + numtcons = 0 + numfcons = 0 + if tcon_file is not attrs.NOTHING: + with open(tcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numtcons = int(line.split()[-1]) + break + if fcon_file is not attrs.NOTHING: + with open(fcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numfcons = int(line.split()[-1]) + break + return numtcons, numfcons + + +def _get_pe_files( + cwd, + design_file=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + files = None + if design_file is not attrs.NOTHING: + with open(design_file) as fp: + for line in fp: + if line.startswith("/NumWaves"): + numpes = int(line.split()[-1]) + files = [ + _gen_fname(f"pe{i + 1}.nii", cwd=cwd, output_type=output_type) + for i in range(numpes) + ] + break + return files + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/flameo.py b/pydra/tasks/fsl/v6/model/flameo.py new file mode 100644 index 0000000..b031931 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/flameo.py @@ -0,0 +1,285 @@ +import attrs +from fileformats.datascience import TextMatrix +from fileformats.generic import Directory, File +from fileformats.medimage import NiftiGz +from fileformats.medimage_fsl import Con +from glob import glob +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.misc import human_order_sorted +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def 
_list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + pth = os.path.join(os.getcwd(), inputs["log_dir"]) + + pes = human_order_sorted(glob(os.path.join(pth, "pe[0-9]*.*"))) + assert len(pes) >= 1, "No pe volumes generated by FSL Estimate" + outputs["pes"] = pes + + res4d = human_order_sorted(glob(os.path.join(pth, "res4d.*"))) + assert len(res4d) == 1, "No residual volume generated by FSL Estimate" + outputs["res4d"] = res4d[0] + + copes = human_order_sorted(glob(os.path.join(pth, "cope[0-9]*.*"))) + assert len(copes) >= 1, "No cope volumes generated by FSL CEstimate" + outputs["copes"] = copes + + var_copes = human_order_sorted(glob(os.path.join(pth, "varcope[0-9]*.*"))) + assert len(var_copes) >= 1, "No varcope volumes generated by FSL CEstimate" + outputs["var_copes"] = var_copes + + zstats = human_order_sorted(glob(os.path.join(pth, "zstat[0-9]*.*"))) + assert len(zstats) >= 1, "No zstat volumes generated by FSL CEstimate" + outputs["zstats"] = zstats + + if inputs["f_con_file"] is not attrs.NOTHING: + zfstats = human_order_sorted(glob(os.path.join(pth, "zfstat[0-9]*.*"))) + assert len(zfstats) >= 1, "No zfstat volumes generated by FSL CEstimate" + outputs["zfstats"] = zfstats + + fstats = human_order_sorted(glob(os.path.join(pth, "fstat[0-9]*.*"))) + assert len(fstats) >= 1, "No fstat volumes generated by FSL CEstimate" + outputs["fstats"] = fstats + + tstats = human_order_sorted(glob(os.path.join(pth, "tstat[0-9]*.*"))) + assert len(tstats) >= 1, "No tstat volumes generated by FSL CEstimate" + outputs["tstats"] = tstats + + mrefs = human_order_sorted( + glob(os.path.join(pth, "mean_random_effects_var[0-9]*.*")) + ) + assert len(mrefs) >= 1, "No mean random effects volumes generated by FLAMEO" + outputs["mrefvars"] = mrefs + + tdof = human_order_sorted(glob(os.path.join(pth, "tdof_t[0-9]*.*"))) + assert len(tdof) >= 1, "No T dof volumes generated by FLAMEO" + outputs["tdof"] = tdof + + weights 
= human_order_sorted(glob(os.path.join(pth, "weights[0-9]*.*"))) + assert len(weights) >= 1, "No weight volumes generated by FLAMEO" + outputs["weights"] = weights + + outputs["stats_dir"] = pth + + return outputs + + +def pes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("pes") + + +def res4d_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("res4d") + + +def copes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("copes") + + +def var_copes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("var_copes") + + +def zstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("zstats") + + +def tstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tstats") + + +def zfstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("zfstats") + + +def fstats_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fstats") + + +def mrefvars_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mrefvars") + + +def tdof_callable(output_dir, inputs, 
stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tdof") + + +def weights_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("weights") + + +def stats_dir_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("stats_dir") + + +@shell.define +class FLAMEO(shell.Task["FLAMEO.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.datascience import TextMatrix + >>> from fileformats.generic import Directory, File + >>> from fileformats.medimage import NiftiGz + >>> from fileformats.medimage_fsl import Con + >>> from pydra.tasks.fsl.v6.model.flameo import FLAMEO + + >>> task = FLAMEO() + >>> task.cope_file = NiftiGz.mock("cope.nii.gz") + >>> task.var_cope_file = File.mock() + >>> task.dof_var_cope_file = File.mock() + >>> task.mask_file = File.mock() + >>> task.design_file = File.mock() + >>> task.t_con_file = Con.mock("design.con") + >>> task.f_con_file = File.mock() + >>> task.cov_split_file = TextMatrix.mock("cov_split.mat") + >>> task.run_mode = "fe" + >>> task.log_dir = Directory.mock() + >>> task.cmdline + 'flameo --copefile=cope.nii.gz --covsplitfile=cov_split.mat --designfile=design.mat --ld=stats --maskfile=mask.nii --runmode=fe --tcontrastsfile=design.con --varcopefile=varcope.nii.gz' + + + """ + + executable = "flameo" + cope_file: NiftiGz = shell.arg( + help="cope regressor data file", argstr="--copefile={cope_file}" + ) + var_cope_file: File = shell.arg( + help="varcope weightings data file", argstr="--varcopefile={var_cope_file}" + ) + dof_var_cope_file: File = shell.arg( + help="dof data file for varcope data", + argstr="--dofvarcopefile={dof_var_cope_file}", + ) + mask_file: File = shell.arg(help="mask file", 
argstr="--maskfile={mask_file}") + design_file: File = shell.arg( + help="design matrix file", argstr="--designfile={design_file}" + ) + t_con_file: Con = shell.arg( + help="ascii matrix specifying t-contrasts", + argstr="--tcontrastsfile={t_con_file}", + ) + f_con_file: File = shell.arg( + help="ascii matrix specifying f-contrasts", + argstr="--fcontrastsfile={f_con_file}", + ) + cov_split_file: TextMatrix = shell.arg( + help="ascii matrix specifying the groups the covariance is split into", + argstr="--covsplitfile={cov_split_file}", + ) + run_mode: ty.Any = shell.arg( + help="inference to perform", argstr="--runmode={run_mode}" + ) + n_jumps: int = shell.arg( + help="number of jumps made by mcmc", argstr="--njumps={n_jumps}" + ) + burnin: int = shell.arg( + help="number of jumps at start of mcmc to be discarded", + argstr="--burnin={burnin}", + ) + sample_every: int = shell.arg( + help="number of jumps for each sample", argstr="--sampleevery={sample_every}" + ) + fix_mean: bool = shell.arg(help="fix mean for tfit", argstr="--fixmean") + infer_outliers: bool = shell.arg( + help="infer outliers - not for fe", argstr="--inferoutliers" + ) + no_pe_outputs: bool = shell.arg( + help="do not output pe files", argstr="--nopeoutput" + ) + sigma_dofs: int = shell.arg( + help="sigma (in mm) to use for Gaussian smoothing the DOFs in FLAME 2. Default is 1mm, -1 indicates no smoothing", + argstr="--sigma_dofs={sigma_dofs}", + ) + outlier_iter: int = shell.arg( + help="Number of max iterations to use when inferring outliers. 
Default is 12.", + argstr="--ioni={outlier_iter}", + ) + log_dir: Directory = shell.arg(help="", argstr="--ld={log_dir}", default="stats") + + class Outputs(shell.Outputs): + pes: list[File] | None = shell.out( + help="Parameter estimates for each column of the design matrix for each voxel", + callable=pes_callable, + ) + res4d: list[File] | None = shell.out( + help="Model fit residual mean-squared error for each time point", + callable=res4d_callable, + ) + copes: list[File] | None = shell.out( + help="Contrast estimates for each contrast", callable=copes_callable + ) + var_copes: list[File] | None = shell.out( + help="Variance estimates for each contrast", callable=var_copes_callable + ) + zstats: list[File] | None = shell.out( + help="z-stat file for each contrast", callable=zstats_callable + ) + tstats: list[File] | None = shell.out( + help="t-stat file for each contrast", callable=tstats_callable + ) + zfstats: list[File] | None = shell.out( + help="z stat file for each f contrast", callable=zfstats_callable + ) + fstats: list[File] | None = shell.out( + help="f-stat file for each contrast", callable=fstats_callable + ) + mrefvars: list[File] | None = shell.out( + help="mean random effect variances for each contrast", + callable=mrefvars_callable, + ) + tdof: list[File] | None = shell.out( + help="temporal dof file for each contrast", callable=tdof_callable + ) + weights: list[File] | None = shell.out( + help="weights file for each contrast", callable=weights_callable + ) + stats_dir: Directory | None = shell.out( + help="directory storing model estimation output", + callable=stats_dir_callable, + ) diff --git a/pydra/tasks/fsl/v6/model/glm.py b/pydra/tasks/fsl/v6/model/glm.py new file mode 100644 index 0000000..96f5a52 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/glm.py @@ -0,0 +1,277 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pathlib import Path +from pathlib import Path 
+from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + + if inputs["out_cope"] is not attrs.NOTHING: + outputs["out_cope"] = os.path.abspath(inputs["out_cope"]) + + if inputs["out_z_name"] is not attrs.NOTHING: + outputs["out_z"] = os.path.abspath(inputs["out_z_name"]) + + if inputs["out_t_name"] is not attrs.NOTHING: + outputs["out_t"] = os.path.abspath(inputs["out_t_name"]) + + if inputs["out_p_name"] is not attrs.NOTHING: + outputs["out_p"] = os.path.abspath(inputs["out_p_name"]) + + if inputs["out_f_name"] is not attrs.NOTHING: + outputs["out_f"] = os.path.abspath(inputs["out_f_name"]) + + if inputs["out_pf_name"] is not attrs.NOTHING: + outputs["out_pf"] = os.path.abspath(inputs["out_pf_name"]) + + if inputs["out_res_name"] is not attrs.NOTHING: + outputs["out_res"] = os.path.abspath(inputs["out_res_name"]) + + if inputs["out_varcb_name"] is not attrs.NOTHING: + outputs["out_varcb"] = os.path.abspath(inputs["out_varcb_name"]) + + if inputs["out_sigsq_name"] is not attrs.NOTHING: + outputs["out_sigsq"] = os.path.abspath(inputs["out_sigsq_name"]) + + if inputs["out_data_name"] is not attrs.NOTHING: + outputs["out_data"] = os.path.abspath(inputs["out_data_name"]) + + if inputs["out_vnscales_name"] is not attrs.NOTHING: + outputs["out_vnscales"] = os.path.abspath(inputs["out_vnscales_name"]) + + return outputs + + +def out_cope_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_cope") + + +def out_z_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_z") + + +def out_t_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, 
stdout=stdout, stderr=stderr + ) + return outputs.get("out_t") + + +def out_p_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_p") + + +def out_f_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_f") + + +def out_pf_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_pf") + + +def out_res_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_res") + + +def out_varcb_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_varcb") + + +def out_sigsq_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_sigsq") + + +def out_data_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_data") + + +def out_vnscales_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_vnscales") + + +@shell.define +class GLM(shell.Task["GLM.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.model.glm import GLM + + >>> task = GLM() + >>> task.in_file = Nifti1.mock("functional.nii") + >>> task.design = 
Nifti1.mock("maps.nii") + >>> task.contrasts = File.mock() + >>> task.mask = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "fsl_glm" + in_file: Nifti1 = shell.arg( + help="input file name (text matrix or 3D/4D image file)", + argstr="-i {in_file}", + position=1, + ) + design: Nifti1 = shell.arg( + help="file name of the GLM design matrix (text time courses for temporal regression or an image file for spatial regression)", + argstr="-d {design}", + position=2, + ) + contrasts: File = shell.arg( + help="matrix of t-statics contrasts", argstr="-c {contrasts}" + ) + mask: File = shell.arg( + help="mask image file name if input is image", argstr="-m {mask}" + ) + dof: int = shell.arg(help="set degrees of freedom explicitly", argstr="--dof={dof}") + des_norm: bool = shell.arg( + help="switch on normalization of the design matrix columns to unit std deviation", + argstr="--des_norm", + ) + dat_norm: bool = shell.arg( + help="switch on normalization of the data time series to unit std deviation", + argstr="--dat_norm", + ) + var_norm: bool = shell.arg( + help="perform MELODIC variance-normalisation on data", argstr="--vn" + ) + demean: bool = shell.arg( + help="switch on demeaining of design and data", argstr="--demean" + ) + out_cope: Path = shell.arg( + help="output file name for COPE (either as txt or image", + argstr="--out_cope={out_cope}", + ) + out_z_name: Path = shell.arg( + help="output file name for Z-stats (either as txt or image", + argstr="--out_z={out_z_name}", + ) + out_t_name: Path = shell.arg( + help="output file name for t-stats (either as txt or image", + argstr="--out_t={out_t_name}", + ) + out_p_name: Path = shell.arg( + help="output file name for p-values of Z-stats (either as text file or image)", + argstr="--out_p={out_p_name}", + ) + out_f_name: Path = shell.arg( + help="output file name for F-value of full model fit", + argstr="--out_f={out_f_name}", + ) + out_pf_name: Path = shell.arg( + help="output file name for p-value for 
full model fit", + argstr="--out_pf={out_pf_name}", + ) + out_res_name: Path = shell.arg( + help="output file name for residuals", argstr="--out_res={out_res_name}" + ) + out_varcb_name: Path = shell.arg( + help="output file name for variance of COPEs", + argstr="--out_varcb={out_varcb_name}", + ) + out_sigsq_name: Path = shell.arg( + help="output file name for residual noise variance sigma-square", + argstr="--out_sigsq={out_sigsq_name}", + ) + out_data_name: Path = shell.arg( + help="output file name for pre-processed data", + argstr="--out_data={out_data_name}", + ) + out_vnscales_name: Path = shell.arg( + help="output file name for scaling factors for variance normalisation", + argstr="--out_vnscales={out_vnscales_name}", + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="filename for GLM parameter estimates (GLM betas)", + argstr="-o {out_file}", + position=3, + path_template="{in_file}_glm", + ) + out_cope: list[File] | None = shell.out( + help="output file name for COPEs (either as text file or image)", + callable=out_cope_callable, + ) + out_z: list[File] | None = shell.out( + help="output file name for COPEs (either as text file or image)", + callable=out_z_callable, + ) + out_t: list[File] | None = shell.out( + help="output file name for t-stats (either as text file or image)", + callable=out_t_callable, + ) + out_p: list[File] | None = shell.out( + help="output file name for p-values of Z-stats (either as text file or image)", + callable=out_p_callable, + ) + out_f: list[File] | None = shell.out( + help="output file name for F-value of full model fit", + callable=out_f_callable, + ) + out_pf: list[File] | None = shell.out( + help="output file name for p-value for full model fit", + callable=out_pf_callable, + ) + out_res: list[File] | None = shell.out( + help="output file name for residuals", callable=out_res_callable + ) + out_varcb: list[File] | None = shell.out( + help="output file name for variance of COPEs", 
callable=out_varcb_callable + ) + out_sigsq: list[File] | None = shell.out( + help="output file name for residual noise variance sigma-square", + callable=out_sigsq_callable, + ) + out_data: list[File] | None = shell.out( + help="output file for preprocessed data", callable=out_data_callable + ) + out_vnscales: list[File] | None = shell.out( + help="output file name for scaling factors for variance normalisation", + callable=out_vnscales_callable, + ) diff --git a/pydra/tasks/fsl/v6/model/l2_model.py b/pydra/tasks/fsl/v6/model/l2_model.py new file mode 100644 index 0000000..0316a4d --- /dev/null +++ b/pydra/tasks/fsl/v6/model/l2_model.py @@ -0,0 +1,79 @@ +import attrs +from fileformats.generic import File +import logging +import os +from pydra.compose import python +import typing as ty + + +logger = logging.getLogger(__name__) + + +@python.define +class L2Model(python.Task["L2Model.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.l2_model import L2Model + + """ + + num_copes: ty.Any + + class Outputs(python.Outputs): + design_mat: File + design_con: File + design_grp: File + + @staticmethod + def function(num_copes: ty.Any) -> tuple[File, File, File]: + design_mat = attrs.NOTHING + design_con = attrs.NOTHING + design_grp = attrs.NOTHING + cwd = os.getcwd() + mat_txt = [ + "/NumWaves 1", + f"/NumPoints {num_copes:d}", + "/PPheights 1", + "", + "/Matrix", + ] + for i in range(num_copes): + mat_txt += ["1"] + mat_txt = "\n".join(mat_txt) + + con_txt = [ + "/ContrastName1 group mean", + "/NumWaves 1", + "/NumContrasts 1", + "/PPheights 1", + "/RequiredEffect 100", # XX where does this + "", + "/Matrix", + "1", + ] + con_txt = "\n".join(con_txt) + + grp_txt = [ + "/NumWaves 1", + f"/NumPoints {num_copes:d}", + "", + "/Matrix", + ] + for i in range(num_copes): + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) + + txt = {"design.mat": mat_txt, "design.con": con_txt, "design.grp": grp_txt} + + for i, 
name in enumerate(["design.mat", "design.con", "design.grp"]): + with open(os.path.join(cwd, name), "w") as f: + f.write(txt[name]) + + outputs = {} + for field in list(outputs["keys"]()): + outputs[field] = os.path.join(os.getcwd(), field.replace("_", ".")) + + return design_mat, design_con, design_grp diff --git a/pydra/tasks/fsl/v6/model/level_1_design.py b/pydra/tasks/fsl/v6/model/level_1_design.py new file mode 100644 index 0000000..304880e --- /dev/null +++ b/pydra/tasks/fsl/v6/model/level_1_design.py @@ -0,0 +1,355 @@ +import acres +import attrs +from fileformats.generic import File +import logging +from nibabel import load +import numpy as np +import os +from pydra.compose import python +from pydra.utils.typing import MultiOutputType +from string import Template +import typing as ty + + +logger = logging.getLogger(__name__) + + +@python.define +class Level1Design(python.Task["Level1Design.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.level_1_design import Level1Design + >>> from pydra.utils.typing import MultiOutputType + + """ + + interscan_interval: float + session_info: ty.Any + bases: ty.Any + orthogonalization: dict = {} + model_serial_correlations: bool + contrasts: list[ty.Any] + + class Outputs(python.Outputs): + fsf_files: list[File] + ev_files: ty.Union[list, object, MultiOutputType] + + @staticmethod + def function( + interscan_interval: float, + session_info: ty.Any, + bases: ty.Any, + orthogonalization: dict, + model_serial_correlations: bool, + contrasts: list[ty.Any], + ) -> tuple[list[File], ty.Union[list, object, MultiOutputType]]: + fsf_files = attrs.NOTHING + ev_files = attrs.NOTHING + cwd = os.getcwd() + fsf_header = load_template("feat_header_l1.tcl") + fsf_postscript = load_template("feat_nongui.tcl") + + prewhiten = 0 + if model_serial_correlations is not attrs.NOTHING: + prewhiten = int(model_serial_correlations) + basis_key = list(bases.keys())[0] + 
ev_parameters = dict(bases[basis_key]) + session_info = _format_session_info(session_info) + func_files = _get_func_files(session_info) + n_tcon = 0 + n_fcon = 0 + if contrasts is not attrs.NOTHING: + for i, c in enumerate(contrasts): + if c[1] == "T": + n_tcon += 1 + elif c[1] == "F": + n_fcon += 1 + + for i, info in enumerate(session_info): + do_tempfilter = 1 + if info["hpf"] == np.inf: + do_tempfilter = 0 + num_evs, cond_txt = _create_ev_files( + cwd, + info, + i, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + basis_key, + ) + nim = load(func_files[i]) + (_, _, _, timepoints) = nim.shape + fsf_txt = fsf_header.substitute( + run_num=i, + interscan_interval=interscan_interval, + num_vols=timepoints, + prewhiten=prewhiten, + num_evs=num_evs[0], + num_evs_real=num_evs[1], + num_tcon=n_tcon, + num_fcon=n_fcon, + high_pass_filter_cutoff=info["hpf"], + temphp_yn=do_tempfilter, + func_file=func_files[i], + ) + fsf_txt += cond_txt + fsf_txt += fsf_postscript.substitute(overwrite=1) + + with open(os.path.join(cwd, "run%d.fsf" % i), "w") as f: + f.write(fsf_txt) + + outputs = {} + cwd = os.getcwd() + fsf_files = [] + ev_files = [] + basis_key = list(bases.keys())[0] + ev_parameters = dict(bases[basis_key]) + for runno, runinfo in enumerate(_format_session_info(session_info)): + fsf_files.append(os.path.join(cwd, "run%d.fsf" % runno)) + ev_files.insert(runno, []) + evname = [] + for field in ["cond", "regress"]: + for i, cond in enumerate(runinfo[field]): + name = cond["name"] + evname.append(name) + evfname = os.path.join( + cwd, "ev_%s_%d_%d.txt" % (name, runno, len(evname)) + ) + if field == "cond": + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") + ev_files[runno].append(os.path.join(cwd, evfname)) + + return fsf_files, ev_files + + +def _create_ev_file(evfname, evinfo): + with open(evfname, "w") as f: + for i in evinfo: + if len(i) == 3: + 
f.write(f"{i[0]:f} {i[1]:f} {i[2]:f}\n") + else: + f.write("%f\n" % i[0]) + + +def _create_ev_files( + cwd, + runinfo, + runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + basis_key, +): + """Creates EV files from condition and regressor information. + + Parameters: + ----------- + + runinfo : dict + Generated by `SpecifyModel` and contains information + about events and other regressors. + runidx : int + Index to run number + ev_parameters : dict + A dictionary containing the model parameters for the + given design type. + orthogonalization : dict + A dictionary of dictionaries specifying orthogonal EVs. + contrasts : list of lists + Information on contrasts to be evaluated + """ + conds = {} + evname = [] + if basis_key == "dgamma": + basis_key = "hrf" + elif basis_key == "gamma": + try: + _ = ev_parameters["gammasigma"] + except KeyError: + ev_parameters["gammasigma"] = 3 + try: + _ = ev_parameters["gammadelay"] + except KeyError: + ev_parameters["gammadelay"] = 6 + ev_template = load_template("feat_ev_" + basis_key + ".tcl") + ev_none = load_template("feat_ev_none.tcl") + ev_ortho = load_template("feat_ev_ortho.tcl") + ev_txt = "" + + num_evs = [0, 0] + for field in ["cond", "regress"]: + for i, cond in enumerate(runinfo[field]): + name = cond["name"] + evname.append(name) + evfname = os.path.join(cwd, "ev_%s_%d_%d.txt" % (name, runidx, len(evname))) + evinfo = [] + num_evs[0] += 1 + num_evs[1] += 1 + if field == "cond": + for j, onset in enumerate(cond["onset"]): + try: + amplitudes = cond["amplitudes"] + if len(amplitudes) > 1: + amp = amplitudes[j] + else: + amp = amplitudes[0] + except KeyError: + amp = 1 + if len(cond["duration"]) > 1: + evinfo.insert(j, [onset, cond["duration"][j], amp]) + else: + evinfo.insert(j, [onset, cond["duration"][0], amp]) + ev_parameters["cond_file"] = evfname + ev_parameters["ev_num"] = num_evs[0] + ev_parameters["ev_name"] = name + ev_parameters["tempfilt_yn"] = do_tempfilter + if "basisorth" not in 
ev_parameters: + ev_parameters["basisorth"] = 1 + if "basisfnum" not in ev_parameters: + ev_parameters["basisfnum"] = 1 + try: + ev_parameters["fsldir"] = os.environ["FSLDIR"] + except KeyError: + if basis_key == "flobs": + raise Exception("FSL environment variables not set") + else: + ev_parameters["fsldir"] = "/usr/share/fsl" + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") + num_evs[1] += 1 + ev_txt += ev_template.substitute(ev_parameters) + elif field == "regress": + evinfo = [[j] for j in cond["val"]] + ev_txt += ev_none.substitute( + ev_num=num_evs[0], + ev_name=name, + tempfilt_yn=do_tempfilter, + cond_file=evfname, + ) + ev_txt += "\n" + conds[name] = evfname + _create_ev_file(evfname, evinfo) + + for i in range(1, num_evs[0] + 1): + initial = ev_ortho.substitute(c0=i, c1=0, orthogonal=1) + for j in range(num_evs[0] + 1): + try: + orthogonal = int(orthogonalization[i][j]) + except (KeyError, TypeError, ValueError, IndexError): + orthogonal = 0 + if orthogonal == 1 and initial not in ev_txt: + ev_txt += initial + "\n" + ev_txt += ev_ortho.substitute(c0=i, c1=j, orthogonal=orthogonal) + ev_txt += "\n" + + if contrasts is not attrs.NOTHING: + contrast_header = load_template("feat_contrast_header.tcl") + contrast_prolog = load_template("feat_contrast_prolog.tcl") + contrast_element = load_template("feat_contrast_element.tcl") + contrast_ftest_element = load_template("feat_contrast_ftest_element.tcl") + contrastmask_header = load_template("feat_contrastmask_header.tcl") + contrastmask_footer = load_template("feat_contrastmask_footer.tcl") + contrastmask_element = load_template("feat_contrastmask_element.tcl") + + ev_txt += contrast_header.substitute() + con_names = [] + for j, con in enumerate(contrasts): + con_names.append(con[0]) + con_map = {} + ftest_idx = [] + ttest_idx = [] + for j, con in enumerate(contrasts): + if con[1] == "F": + ftest_idx.append(j) + 
for c in con[2]: + if c[0] not in list(con_map.keys()): + con_map[c[0]] = [] + con_map[c[0]].append(j) + else: + ttest_idx.append(j) + + for ctype in ["real", "orig"]: + for j, con in enumerate(contrasts): + if con[1] == "F": + continue + tidx = ttest_idx.index(j) + 1 + ev_txt += contrast_prolog.substitute( + cnum=tidx, ctype=ctype, cname=con[0] + ) + count = 0 + for c in range(1, len(evname) + 1): + if evname[c - 1].endswith("TD") and ctype == "orig": + continue + count = count + 1 + if evname[c - 1] in con[2]: + val = con[3][con[2].index(evname[c - 1])] + else: + val = 0.0 + ev_txt += contrast_element.substitute( + cnum=tidx, element=count, ctype=ctype, val=val + ) + ev_txt += "\n" + + for fconidx in ftest_idx: + fval = 0 + if con[0] in con_map and fconidx in con_map[con[0]]: + fval = 1 + ev_txt += contrast_ftest_element.substitute( + cnum=ftest_idx.index(fconidx) + 1, + element=tidx, + ctype=ctype, + val=fval, + ) + ev_txt += "\n" + + ev_txt += contrastmask_header.substitute() + for j, _ in enumerate(contrasts): + for k, _ in enumerate(contrasts): + if j != k: + ev_txt += contrastmask_element.substitute(c1=j + 1, c2=k + 1) + ev_txt += contrastmask_footer.substitute() + return num_evs, ev_txt + + +def _format_session_info(session_info): + if isinstance(session_info, dict): + session_info = [session_info] + return session_info + + +def _get_func_files(session_info): + """Returns functional files in the order of runs""" + func_files = [] + for i, info in enumerate(session_info): + func_files.insert(i, info["scans"]) + return func_files + + +def load_template(name): + """Load a template from the model_templates directory + + Parameters + ---------- + name : str + The name of the file to load + + Returns + ------- + template : string.Template + + """ + loader = acres.Loader("nipype.interfaces.fsl") + return Template(loader.readable("model_templates", name).read_text()) diff --git a/pydra/tasks/fsl/v6/model/melodic.py b/pydra/tasks/fsl/v6/model/melodic.py new file 
mode 100644 index 0000000..d796264 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/melodic.py @@ -0,0 +1,216 @@ +import attrs +from fileformats.generic import Directory, File +from fileformats.medimage_fsl import Con +import logging +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + if inputs["out_dir"] is not attrs.NOTHING: + outputs["out_dir"] = os.path.abspath(inputs["out_dir"]) + else: + outputs["out_dir"] = _gen_filename( + "out_dir", + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if (inputs["report"] is not attrs.NOTHING) and inputs["report"]: + outputs["report_dir"] = os.path.join(outputs["out_dir"], "report") + return outputs + + +def report_dir_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("report_dir") + + +def _gen_filename(name, inputs): + if name == "out_dir": + return os.getcwd() + + +def out_dir_default(inputs): + return _gen_filename("out_dir", inputs=inputs) + + +@shell.define +class MELODIC(shell.Task["MELODIC.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import Directory, File + >>> from fileformats.medimage_fsl import Con + >>> from pydra.tasks.fsl.v6.model.melodic import MELODIC + + >>> task = MELODIC() + >>> task.mask = File.mock() + >>> task.no_bet = True + >>> task.approach = "tica" + >>> task.ICs = File.mock() + >>> task.mix = File.mock() + >>> task.smode = File.mock() + >>> task.bg_image = File.mock() + >>> task.tr_sec = 1.5 + >>> task.t_des = File.mock() + >>> task.t_con = Con.mock("timeDesign.con") + >>> task.s_des = File.mock() + >>> task.s_con = Con.mock("subjectDesign.con") + >>> task.out_stats = True + >>> task.cmdline + 'melodic -i 
functional.nii,functional2.nii,functional3.nii -a tica --bgthreshold=10.000000 --mmthresh=0.500000 --nobet -o groupICA.out --Ostats --Scon=subjectDesign.con --Sdes=subjectDesign.mat --Tcon=timeDesign.con --Tdes=timeDesign.mat --tr=1.500000' + + + """ + + executable = "melodic" + in_files: list[File] = shell.arg( + help="input file names (either single file name or a list)", + argstr="-i {in_files}", + sep=",", + position=1, + ) + mask: File = shell.arg( + help="file name of mask for thresholding", argstr="-m {mask}" + ) + no_mask: bool = shell.arg(help="switch off masking", argstr="--nomask") + update_mask: bool = shell.arg( + help="switch off mask updating", argstr="--update_mask" + ) + no_bet: bool = shell.arg(help="switch off BET", argstr="--nobet") + bg_threshold: float = shell.arg( + help="brain/non-brain threshold used to mask non-brain voxels, as a percentage (only if --nobet selected)", + argstr="--bgthreshold={bg_threshold}", + ) + dim: int = shell.arg( + help="dimensionality reduction into #num dimensions (default: automatic estimation)", + argstr="-d {dim}", + ) + dim_est: str = shell.arg( + help="use specific dim. 
estimation technique: lap, bic, mdl, aic, mean (default: lap)", + argstr="--dimest={dim_est}", + ) + sep_whiten: bool = shell.arg( + help="switch on separate whitening", argstr="--sep_whiten" + ) + sep_vn: bool = shell.arg( + help="switch off joined variance normalization", argstr="--sep_vn" + ) + migp: bool = shell.arg(help="switch on MIGP data reduction", argstr="--migp") + migpN: int = shell.arg( + help="number of internal Eigenmaps", argstr="--migpN {migpN}" + ) + migp_shuffle: bool = shell.arg( + help="randomise MIGP file order (default: TRUE)", argstr="--migp_shuffle" + ) + migp_factor: int = shell.arg( + help="Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)", + argstr="--migp_factor {migp_factor}", + ) + num_ICs: int = shell.arg( + help="number of IC's to extract (for deflation approach)", argstr="-n {num_ICs}" + ) + approach: str = shell.arg( + help="approach for decomposition, 2D: defl, symm (default), 3D: tica (default), concat", + argstr="-a {approach}", + ) + non_linearity: str = shell.arg( + help="nonlinearity: gauss, tanh, pow3, pow4", argstr="--nl={non_linearity}" + ) + var_norm: bool = shell.arg(help="switch off variance normalization", argstr="--vn") + pbsc: bool = shell.arg( + help="switch off conversion to percent BOLD signal change", argstr="--pbsc" + ) + cov_weight: float = shell.arg( + help="voxel-wise weights for the covariance matrix (e.g. 
segmentation information)", + argstr="--covarweight={cov_weight}", + ) + epsilon: float = shell.arg(help="minimum error change", argstr="--eps={epsilon}") + epsilonS: float = shell.arg( + help="minimum error change for rank-1 approximation in TICA", + argstr="--epsS={epsilonS}", + ) + maxit: int = shell.arg( + help="maximum number of iterations before restart", argstr="--maxit={maxit}" + ) + max_restart: int = shell.arg( + help="maximum number of restarts", argstr="--maxrestart={max_restart}" + ) + mm_thresh: float = shell.arg( + help="threshold for Mixture Model based inference", + argstr="--mmthresh={mm_thresh}", + ) + no_mm: bool = shell.arg( + help="switch off mixture modelling on IC maps", argstr="--no_mm" + ) + ICs: File = shell.arg( + help="filename of the IC components file for mixture modelling", + argstr="--ICs={ICs}", + ) + mix: File = shell.arg( + help="mixing matrix for mixture modelling / filtering", argstr="--mix={mix}" + ) + smode: File = shell.arg( + help="matrix of session modes for report generation", argstr="--smode={smode}" + ) + rem_cmp: list[int] = shell.arg( + help="component numbers to remove", argstr="-f {rem_cmp}" + ) + report: bool = shell.arg(help="generate Melodic web report", argstr="--report") + bg_image: File = shell.arg( + help="specify background image for report (default: mean image)", + argstr="--bgimage={bg_image}", + ) + tr_sec: float = shell.arg(help="TR in seconds", argstr="--tr={tr_sec}") + log_power: bool = shell.arg( + help="calculate log of power for frequency spectrum", argstr="--logPower" + ) + t_des: File = shell.arg( + help="design matrix across time-domain", argstr="--Tdes={t_des}" + ) + t_con: Con = shell.arg( + help="t-contrast matrix across time-domain", argstr="--Tcon={t_con}" + ) + s_des: File = shell.arg( + help="design matrix across subject-domain", argstr="--Sdes={s_des}" + ) + s_con: Con = shell.arg( + help="t-contrast matrix across subject-domain", argstr="--Scon={s_con}" + ) + out_all: bool = 
shell.arg(help="output everything", argstr="--Oall") + out_unmix: bool = shell.arg(help="output unmixing matrix", argstr="--Ounmix") + out_stats: bool = shell.arg( + help="output thresholded maps and probability maps", argstr="--Ostats" + ) + out_pca: bool = shell.arg(help="output PCA results", argstr="--Opca") + out_white: bool = shell.arg( + help="output whitening/dewhitening matrices", argstr="--Owhite" + ) + out_orig: bool = shell.arg(help="output the original ICs", argstr="--Oorig") + out_mean: bool = shell.arg(help="output mean volume", argstr="--Omean") + report_maps: str = shell.arg( + help="control string for spatial map images (see slicer)", + argstr="--report_maps={report_maps}", + ) + remove_deriv: bool = shell.arg( + help="removes every second entry in paradigm file (EV derivatives)", + argstr="--remove_deriv", + ) + + class Outputs(shell.Outputs): + out_dir: ty.Any = shell.outarg( + help="output directory name", argstr="-o {out_dir}", path_template="out_dir" + ) + report_dir: Directory | None = shell.out(callable=report_dir_callable) diff --git a/pydra/tasks/fsl/v6/model/multiple_regress_design.py b/pydra/tasks/fsl/v6/model/multiple_regress_design.py new file mode 100644 index 0000000..f04e7d9 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/multiple_regress_design.py @@ -0,0 +1,133 @@ +import attrs +from fileformats.generic import File +import logging +import numpy as np +import os +from pydra.compose import python +import typing as ty + + +logger = logging.getLogger(__name__) + + +@python.define +class MultipleRegressDesign(python.Task["MultipleRegressDesign.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.multiple_regress_design import MultipleRegressDesign + + """ + + contrasts: list[ty.Any] + regressors: dict + groups: list[int] + + class Outputs(python.Outputs): + design_mat: File + design_con: File + design_fts: File + design_grp: File + + @staticmethod + def function( + 
contrasts: list[ty.Any], regressors: dict, groups: list[int] + ) -> tuple[File, File, File, File]: + design_mat = attrs.NOTHING + design_con = attrs.NOTHING + design_fts = attrs.NOTHING + design_grp = attrs.NOTHING + cwd = os.getcwd() + regs = sorted(regressors.keys()) + nwaves = len(regs) + npoints = len(regressors[regs[0]]) + ntcons = sum(1 for con in contrasts if con[1] == "T") + nfcons = sum(1 for con in contrasts if con[1] == "F") + + mat_txt = ["/NumWaves %d" % nwaves, "/NumPoints %d" % npoints] + ppheights = [] + for reg in regs: + maxreg = np.max(regressors[reg]) + minreg = np.min(regressors[reg]) + if np.sign(maxreg) == np.sign(minreg): + regheight = max([abs(minreg), abs(maxreg)]) + else: + regheight = abs(maxreg - minreg) + ppheights.append("%e" % regheight) + mat_txt += ["/PPheights " + " ".join(ppheights)] + mat_txt += ["", "/Matrix"] + for cidx in range(npoints): + mat_txt.append(" ".join(["%e" % regressors[key][cidx] for key in regs])) + mat_txt = "\n".join(mat_txt) + "\n" + + con_txt = [] + counter = 0 + tconmap = {} + for conidx, con in enumerate(contrasts): + if con[1] == "T": + tconmap[conidx] = counter + counter += 1 + con_txt += ["/ContrastName%d %s" % (counter, con[0])] + con_txt += [ + "/NumWaves %d" % nwaves, + "/NumContrasts %d" % ntcons, + "/PPheights %s" % " ".join(["%e" % 1 for i in range(counter)]), + "/RequiredEffect %s" % " ".join(["%.3f" % 100 for i in range(counter)]), + "", + "/Matrix", + ] + for idx in sorted(tconmap.keys()): + convals = np.zeros((nwaves, 1)) + for regidx, reg in enumerate(contrasts[idx][2]): + convals[regs.index(reg)] = contrasts[idx][3][regidx] + con_txt.append(" ".join(["%e" % val for val in convals])) + con_txt = "\n".join(con_txt) + "\n" + + fcon_txt = "" + if nfcons: + fcon_txt = [ + "/NumWaves %d" % ntcons, + "/NumContrasts %d" % nfcons, + "", + "/Matrix", + ] + for conidx, con in enumerate(contrasts): + if con[1] == "F": + convals = np.zeros((ntcons, 1)) + for tcon in con[2]: + 
convals[tconmap[contrasts.index(tcon)]] = 1 + fcon_txt.append(" ".join(["%d" % val for val in convals])) + fcon_txt = "\n".join(fcon_txt) + "\n" + + grp_txt = ["/NumWaves 1", "/NumPoints %d" % npoints, "", "/Matrix"] + for i in range(npoints): + if groups is not attrs.NOTHING: + grp_txt += ["%d" % groups[i]] + else: + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) + "\n" + + txt = { + "design.mat": mat_txt, + "design.con": con_txt, + "design.fts": fcon_txt, + "design.grp": grp_txt, + } + + for key, val in list(txt.items()): + if ("fts" in key) and (nfcons == 0): + continue + filename = key.replace("_", ".") + with open(os.path.join(cwd, filename), "w") as f: + f.write(val) + + design_mat = os.path.join(cwd, "design.mat") + design_con = os.path.join(cwd, "design.con") + design_grp = os.path.join(cwd, "design.grp") + nfcons = sum(1 for con in contrasts if con[1] == "F") + if nfcons: + design_fts = os.path.join(cwd, "design.fts") + + return design_mat, design_con, design_fts, design_grp diff --git a/pydra/tasks/fsl/v6/model/randomise.py b/pydra/tasks/fsl/v6/model/randomise.py new file mode 100644 index 0000000..1350b15 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/randomise.py @@ -0,0 +1,319 @@ +import attrs +from fileformats.datascience import TextMatrix +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.medimage_fsl import Con +from glob import glob +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["tstat_files"] = glob( + _gen_fname( + "%s_tstat*.nii" % inputs["base_name"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["fstat_files"] = glob( + _gen_fname( + 
"%s_fstat*.nii" % inputs["base_name"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + prefix = False + if inputs["tfce"] or inputs["tfce2D"]: + prefix = "tfce" + elif inputs["vox_p_values"]: + prefix = "vox" + elif inputs["c_thresh"] or inputs["f_c_thresh"]: + prefix = "clustere" + elif inputs["cm_thresh"] or inputs["f_cm_thresh"]: + prefix = "clusterm" + if prefix: + outputs["t_p_files"] = glob( + _gen_fname( + f"{inputs['base_name']}_{prefix}_p_tstat*", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["t_corrected_p_files"] = glob( + _gen_fname( + f"{inputs['base_name']}_{prefix}_corrp_tstat*.nii", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + + outputs["f_p_files"] = glob( + _gen_fname( + f"{inputs['base_name']}_{prefix}_p_fstat*.nii", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + outputs["f_corrected_p_files"] = glob( + _gen_fname( + f"{inputs['base_name']}_{prefix}_corrp_fstat*.nii", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + return outputs + + +def tstat_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tstat_files") + + +def fstat_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fstat_files") + + +def t_p_files_callable(output_dir, 
inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("t_p_files") + + +def f_p_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("f_p_files") + + +def t_corrected_p_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("t_corrected_p_files") + + +def f_corrected_p_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("f_corrected_p_files") + + +@shell.define +class Randomise(shell.Task["Randomise.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.datascience import TextMatrix + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from fileformats.medimage_fsl import Con + >>> from pydra.tasks.fsl.v6.model.randomise import Randomise + + >>> task = Randomise() + >>> task.in_file = Nifti1.mock("allFA.nii") + >>> task.design_mat = TextMatrix.mock("design.mat") + >>> task.tcon = Con.mock("design.con") + >>> task.fcon = File.mock() + >>> task.mask = Nifti1.mock("mask.nii") + >>> task.x_block_labels = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "randomise" + in_file: Nifti1 = shell.arg(help="4D input file", argstr="-i {in_file}", position=1) + base_name: str = shell.arg( + help="the rootname that all generated files will have", + argstr='-o "{base_name}"', + position=2, + default="randomise", + ) + design_mat: TextMatrix = shell.arg( + help="design matrix file", argstr="-d {design_mat}", position=3 + ) + tcon: Con = shell.arg(help="t contrasts file", argstr="-t {tcon}", position=4) + fcon: File = shell.arg(help="f contrasts file", argstr="-f {fcon}") + 
mask: Nifti1 = shell.arg(help="mask image", argstr="-m {mask}") + x_block_labels: File = shell.arg( + help="exchangeability block labels file", argstr="-e {x_block_labels}" + ) + demean: bool = shell.arg( + help="demean data temporally before model fitting", argstr="-D" + ) + one_sample_group_mean: bool = shell.arg( + help="perform 1-sample group-mean test instead of generic permutation test", + argstr="-1", + ) + show_total_perms: bool = shell.arg( + help="print out how many unique permutations would be generated and exit", + argstr="-q", + ) + show_info_parallel_mode: bool = shell.arg( + help="print out information required for parallel mode and exit", argstr="-Q" + ) + vox_p_values: bool = shell.arg( + help="output voxelwise (corrected and uncorrected) p-value images", argstr="-x" + ) + tfce: bool = shell.arg( + help="carry out Threshold-Free Cluster Enhancement", argstr="-T" + ) + tfce2D: bool = shell.arg( + help="carry out Threshold-Free Cluster Enhancement with 2D optimisation", + argstr="--T2", + ) + f_only: bool = shell.arg(help="calculate f-statistics only", argstr="--fonly") + raw_stats_imgs: bool = shell.arg( + help="output raw ( unpermuted ) statistic images", argstr="-R" + ) + p_vec_n_dist_files: bool = shell.arg( + help="output permutation vector and null distribution text files", argstr="-P" + ) + num_perm: int = shell.arg( + help="number of permutations (default 5000, set to 0 for exhaustive)", + argstr="-n {num_perm}", + ) + seed: int = shell.arg( + help="specific integer seed for random number generator", argstr="--seed={seed}" + ) + var_smooth: int = shell.arg( + help="use variance smoothing (std is in mm)", argstr="-v {var_smooth}" + ) + c_thresh: float = shell.arg( + help="carry out cluster-based thresholding", argstr="-c {c_thresh:.1}" + ) + cm_thresh: float = shell.arg( + help="carry out cluster-mass-based thresholding", argstr="-C {cm_thresh:.1}" + ) + f_c_thresh: float = shell.arg( + help="carry out f cluster thresholding", argstr="-F 
{f_c_thresh:.2}" + ) + f_cm_thresh: float = shell.arg( + help="carry out f cluster-mass thresholding", argstr="-S {f_cm_thresh:.2}" + ) + tfce_H: float = shell.arg( + help="TFCE height parameter (default=2)", argstr="--tfce_H={tfce_H:.2}" + ) + tfce_E: float = shell.arg( + help="TFCE extent parameter (default=0.5)", argstr="--tfce_E={tfce_E:.2}" + ) + tfce_C: float = shell.arg( + help="TFCE connectivity (6 or 26; default=6)", argstr="--tfce_C={tfce_C:.2}" + ) + + class Outputs(shell.Outputs): + tstat_files: list[File] | None = shell.out( + help="t contrast raw statistic", callable=tstat_files_callable + ) + fstat_files: list[File] | None = shell.out( + help="f contrast raw statistic", callable=fstat_files_callable + ) + t_p_files: list[File] | None = shell.out( + help="f contrast uncorrected p values files", callable=t_p_files_callable + ) + f_p_files: list[File] | None = shell.out( + help="f contrast uncorrected p values files", callable=f_p_files_callable + ) + t_corrected_p_files: list[File] | None = shell.out( + help="t contrast FWE (Family-wise error) corrected p values files", + callable=t_corrected_p_files_callable, + ) + f_corrected_p_files: list[File] | None = shell.out( + help="f contrast FWE (Family-wise error) corrected p values files", + callable=f_corrected_p_files_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "randomise" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = {"NIFTI": ".nii", "NIFTI_PAIR": ".img", "NIFTI_GZ": ".nii.gz", "NIFTI_PAIR_GZ": ".img.gz"}.get(output_type, ".nii.gz") + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/smm.py b/pydra/tasks/fsl/v6/model/smm.py new file mode 100644 index 0000000..bc36d32 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/smm.py @@ -0,0 +1,154 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + + outputs["null_p_map"] = _gen_fname( + basename="w1_mean", + cwd="logdir", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["activation_p_map"] = _gen_fname( + basename="w2_mean", + cwd="logdir", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + if (inputs["no_deactivation_class"] is attrs.NOTHING) or not inputs[ + "no_deactivation_class" + ]: + outputs["deactivation_p_map"] = _gen_fname( + basename="w3_mean", + cwd="logdir", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + return outputs + + +def 
null_p_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("null_p_map") + + +def activation_p_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("activation_p_map") + + +def deactivation_p_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("deactivation_p_map") + + +@shell.define +class SMM(shell.Task["SMM.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.model.smm import SMM + + """ + + executable = "mm --ld=logdir" + spatial_data_file: File = shell.arg( + help="statistics spatial map", argstr='--sdf="{spatial_data_file}"', position=1 + ) + mask: File = shell.arg(help="mask file", argstr='--mask="{mask}"', position=2) + no_deactivation_class: bool = shell.arg( + help="enforces no deactivation class", argstr="--zfstatmode", position=3 + ) + + class Outputs(shell.Outputs): + null_p_map: File | None = shell.out(callable=null_p_map_callable) + activation_p_map: File | None = shell.out(callable=activation_p_map_callable) + deactivation_p_map: File | None = shell.out( + callable=deactivation_p_map_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. 
(default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "mm --ld=logdir" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/model/smooth_estimate.py b/pydra/tasks/fsl/v6/model/smooth_estimate.py new file mode 100644 index 0000000..e603676 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/smooth_estimate.py @@ -0,0 +1,88 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import NiftiGz +import logging +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def aggregate_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + needed_outputs = ["dlh", "volume", "resels"] + + outputs = {} + stdout = stdout.split("\n") + outputs["dlh"] = float(stdout[0].split()[1]) + outputs["volume"] = int(stdout[1].split()[1]) + outputs["resels"] = float(stdout[2].split()[1]) + return outputs + + +def dlh_callable(output_dir, inputs, stdout, stderr): + outputs = aggregate_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("dlh") + + +def volume_callable(output_dir, inputs, stdout, stderr): + outputs = aggregate_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("volume") + + +def 
resels_callable(output_dir, inputs, stdout, stderr): + outputs = aggregate_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("resels") + + +@shell.define(xor=[["dof", "zstat_file"]]) +class SmoothEstimate(shell.Task["SmoothEstimate.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import NiftiGz + >>> from pydra.tasks.fsl.v6.model.smooth_estimate import SmoothEstimate + + >>> task = SmoothEstimate() + >>> task.mask_file = File.mock() + >>> task.residual_fit_file = File.mock() + >>> task.zstat_file = NiftiGz.mock("zstat1.nii.gz") + >>> task.cmdline + 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' + + + """ + + executable = "smoothest" + dof: int | None = shell.arg( + help="number of degrees of freedom", argstr="--dof={dof}" + ) + mask_file: File = shell.arg(help="brain mask volume", argstr="--mask={mask_file}") + residual_fit_file: File | None = shell.arg( + help="residual-fit image file", + argstr="--res={residual_fit_file}", + requires=["dof"], + ) + zstat_file: NiftiGz | None = shell.arg( + help="zstat image file", argstr="--zstat={zstat_file}" + ) + + class Outputs(shell.Outputs): + dlh: float | None = shell.out( + help="smoothness estimate sqrt(det(Lambda))", callable=dlh_callable + ) + volume: int | None = shell.out( + help="number of voxels in mask", callable=volume_callable + ) + resels: float | None = shell.out( + help="volume of resel, in voxels, defined as FWHM_x * FWHM_y * FWHM_z", + callable=resels_callable, + ) diff --git a/pydra/tasks/fsl/v6/model/tests/conftest.py b/pydra/tasks/fsl/v6/model/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def 
pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/model/tests/test_cluster.py b/pydra/tasks/fsl/v6/model/tests/test_cluster.py new file mode 100644 index 0000000..e088b5d --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_cluster.py @@ -0,0 +1,36 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.cluster import Cluster +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_cluster_1(): + task = Cluster() + task.in_file = File.sample(seed=0) + task.cope_file = File.sample(seed=12) + task.fractional = False + task.use_mm = False + task.find_min = False + task.no_table = False + task.minclustersize = False + task.xfm_file = File.sample(seed=21) + task.std_space_file = File.sample(seed=22) + task.warpfield_file = File.sample(seed=24) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_cluster_2(): + task = Cluster() + task.threshold = 2.3 + task.out_localmax_txt_file = "stats.txt" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_contrastmgr.py b/pydra/tasks/fsl/v6/model/tests/test_contrastmgr.py new file mode 100644 index 0000000..ff239b7 --- /dev/null +++ 
b/pydra/tasks/fsl/v6/model/tests/test_contrastmgr.py @@ -0,0 +1,22 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.contrast_mgr import ContrastMgr +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_contrastmgr_1(): + task = ContrastMgr() + task.tcon_file = File.sample(seed=0) + task.fcon_file = File.sample(seed=1) + task.param_estimates = [File.sample(seed=2)] + task.corrections = File.sample(seed=3) + task.dof_file = File.sample(seed=4) + task.sigmasquareds = File.sample(seed=5) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_dualregression.py b/pydra/tasks/fsl/v6/model/tests/test_dualregression.py new file mode 100644 index 0000000..b780803 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_dualregression.py @@ -0,0 +1,34 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.dual_regression import DualRegression +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_dualregression_1(): + task = DualRegression() + task.in_files = [Nifti1.sample(seed=0)] + task.group_IC_maps_4D = File.sample(seed=1) + task.des_norm = True + task.design_file = File.sample(seed=4) + task.con_file = File.sample(seed=5) + task.out_dir = "output" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_dualregression_2(): + task = DualRegression() + task.in_files = [Nifti1.sample(seed=0)] + task.des_norm = False + task.n_perm = 10 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git 
a/pydra/tasks/fsl/v6/model/tests/test_feat.py b/pydra/tasks/fsl/v6/model/tests/test_feat.py new file mode 100644 index 0000000..d365268 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_feat.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.feat import FEAT +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_feat_1(): + task = FEAT() + task.fsf_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_featmodel.py b/pydra/tasks/fsl/v6/model/tests/test_featmodel.py new file mode 100644 index 0000000..78b2910 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_featmodel.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.feat_model import FEATModel +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_featmodel_1(): + task = FEATModel() + task.fsf_file = File.sample(seed=0) + task.ev_files = [File.sample(seed=1)] + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_filmgls.py b/pydra/tasks/fsl/v6/model/tests/test_filmgls.py new file mode 100644 index 0000000..a89300d --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_filmgls.py @@ -0,0 +1,20 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.filmgls import FILMGLS +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_filmgls_1(): + task = FILMGLS() + task.in_file = File.sample(seed=0) + task.design_file = File.sample(seed=1) + task.threshold = 1000.0 + 
task.results_dir = "results" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_flameo.py b/pydra/tasks/fsl/v6/model/tests/test_flameo.py new file mode 100644 index 0000000..1fb3b4e --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_flameo.py @@ -0,0 +1,40 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import Directory, File +from fileformats.medimage import NiftiGz +from fileformats.medimage_fsl import Con +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.flameo import FLAMEO +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_flameo_1(): + task = FLAMEO() + task.cope_file = NiftiGz.sample(seed=0) + task.var_cope_file = File.sample(seed=1) + task.dof_var_cope_file = File.sample(seed=2) + task.mask_file = File.sample(seed=3) + task.design_file = File.sample(seed=4) + task.t_con_file = Con.sample(seed=5) + task.f_con_file = File.sample(seed=6) + task.cov_split_file = TextMatrix.sample(seed=7) + task.log_dir = Directory.sample(seed=17) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_flameo_2(): + task = FLAMEO() + task.cope_file = NiftiGz.sample(seed=0) + task.t_con_file = Con.sample(seed=5) + task.cov_split_file = TextMatrix.sample(seed=7) + task.run_mode = "fe" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_glm.py b/pydra/tasks/fsl/v6/model/tests/test_glm.py new file mode 100644 index 0000000..29068a2 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_glm.py @@ -0,0 +1,31 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker 
+from pydra.tasks.fsl.v6.model.glm import GLM +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_glm_1(): + task = GLM() + task.in_file = Nifti1.sample(seed=0) + task.design = Nifti1.sample(seed=2) + task.contrasts = File.sample(seed=3) + task.mask = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_glm_2(): + task = GLM() + task.in_file = Nifti1.sample(seed=0) + task.design = Nifti1.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_l2model.py b/pydra/tasks/fsl/v6/model/tests/test_l2model.py new file mode 100644 index 0000000..85c95d5 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_l2model.py @@ -0,0 +1,14 @@ +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.l2_model import L2Model +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_l2model_1(): + task = L2Model() + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_level1design.py b/pydra/tasks/fsl/v6/model/tests/test_level1design.py new file mode 100644 index 0000000..c87115f --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_level1design.py @@ -0,0 +1,15 @@ +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.level_1_design import Level1Design +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_level1design_1(): + task = Level1Design() + task.orthogonalization = {} + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_melodic.py b/pydra/tasks/fsl/v6/model/tests/test_melodic.py new file mode 100644 index 0000000..aa5c4fa 
--- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_melodic.py @@ -0,0 +1,41 @@ +from fileformats.generic import File +from fileformats.medimage_fsl import Con +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.melodic import MELODIC +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_melodic_1(): + task = MELODIC() + task.in_files = [File.sample(seed=0)] + task.mask = File.sample(seed=2) + task.ICs = File.sample(seed=27) + task.mix = File.sample(seed=28) + task.smode = File.sample(seed=29) + task.bg_image = File.sample(seed=32) + task.t_des = File.sample(seed=35) + task.t_con = Con.sample(seed=36) + task.s_des = File.sample(seed=37) + task.s_con = Con.sample(seed=38) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_melodic_2(): + task = MELODIC() + task.no_bet = True + task.approach = "tica" + task.tr_sec = 1.5 + task.t_con = Con.sample(seed=36) + task.s_con = Con.sample(seed=38) + task.out_stats = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_multipleregressdesign.py b/pydra/tasks/fsl/v6/model/tests/test_multipleregressdesign.py new file mode 100644 index 0000000..b61af20 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_multipleregressdesign.py @@ -0,0 +1,14 @@ +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.multiple_regress_design import MultipleRegressDesign +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_multipleregressdesign_1(): + task = MultipleRegressDesign() + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_randomise.py b/pydra/tasks/fsl/v6/model/tests/test_randomise.py new file mode 
100644 index 0000000..47bf623 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_randomise.py @@ -0,0 +1,38 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.medimage_fsl import Con +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.randomise import Randomise +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_randomise_1(): + task = Randomise() + task.in_file = Nifti1.sample(seed=0) + task.base_name = "randomise" + task.design_mat = TextMatrix.sample(seed=2) + task.tcon = Con.sample(seed=3) + task.fcon = File.sample(seed=4) + task.mask = Nifti1.sample(seed=5) + task.x_block_labels = File.sample(seed=6) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_randomise_2(): + task = Randomise() + task.in_file = Nifti1.sample(seed=0) + task.design_mat = TextMatrix.sample(seed=2) + task.tcon = Con.sample(seed=3) + task.mask = Nifti1.sample(seed=5) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/model/tests/test_smm.py b/pydra/tasks/fsl/v6/model/tests/test_smm.py new file mode 100644 index 0000000..fc88346 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_smm.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.smm import SMM +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_smm_1(): + task = SMM() + task.spatial_data_file = File.sample(seed=0) + task.mask = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git 
a/pydra/tasks/fsl/v6/model/tests/test_smoothestimate.py b/pydra/tasks/fsl/v6/model/tests/test_smoothestimate.py new file mode 100644 index 0000000..be35ce2 --- /dev/null +++ b/pydra/tasks/fsl/v6/model/tests/test_smoothestimate.py @@ -0,0 +1,29 @@ +from fileformats.generic import File +from fileformats.medimage import NiftiGz +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.model.smooth_estimate import SmoothEstimate +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_smoothestimate_1(): + task = SmoothEstimate() + task.mask_file = File.sample(seed=1) + task.residual_fit_file = File.sample(seed=2) + task.zstat_file = NiftiGz.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_smoothestimate_2(): + task = SmoothEstimate() + task.zstat_file = NiftiGz.sample(seed=3) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/nipype_ports/__init__.py b/pydra/tasks/fsl/v6/nipype_ports/__init__.py new file mode 100644 index 0000000..e929a18 --- /dev/null +++ b/pydra/tasks/fsl/v6/nipype_ports/__init__.py @@ -0,0 +1,8 @@ +from .utils import ( + fname_presuffix, + human_order_sorted, + load_json, + save_json, + simplify_list, + split_filename, +) diff --git a/pydra/tasks/fsl/v6/nipype_ports/utils/__init__.py b/pydra/tasks/fsl/v6/nipype_ports/utils/__init__.py new file mode 100644 index 0000000..7aa6834 --- /dev/null +++ b/pydra/tasks/fsl/v6/nipype_ports/utils/__init__.py @@ -0,0 +1,8 @@ +from .filemanip import ( + fname_presuffix, + load_json, + save_json, + simplify_list, + split_filename, +) +from .misc import human_order_sorted diff --git a/nipype-auto-conv/specs/interfaces/plot_motion_params_callables.py b/pydra/tasks/fsl/v6/nipype_ports/utils/filemanip.py similarity index 53% rename from 
nipype-auto-conv/specs/interfaces/plot_motion_params_callables.py rename to pydra/tasks/fsl/v6/nipype_ports/utils/filemanip.py index d632bf6..3f33830 100644 --- a/nipype-auto-conv/specs/interfaces/plot_motion_params_callables.py +++ b/pydra/tasks/fsl/v6/nipype_ports/utils/filemanip.py @@ -1,47 +1,12 @@ -"""Module to put any functions that are referred to in the "callables" section of PlotMotionParams.yaml""" - -import attrs -import os +import json +import logging import os.path as op -from pathlib import Path - - -def out_file_default(inputs): - return _gen_filename("out_file", inputs=inputs) - +import simplejson as json -def out_file_callable(output_dir, inputs, stdout, stderr): - outputs = _list_outputs( - output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr - ) - return outputs["out_file"] +logger = logging.getLogger(__name__) -# Original source at L1495 of /interfaces/fsl/utils.py -def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): - if name == "out_file": - return _list_outputs( - inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir - )["out_file"] - return None - -# Original source at L1478 of /interfaces/fsl/utils.py -def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): - outputs = {} - out_file = inputs.out_file - if out_file is attrs.NOTHING: - if isinstance(inputs.in_file, list): - infile = inputs.in_file[0] - else: - infile = inputs.in_file - plttype = dict(rot="rot", tra="trans", dis="disp")[inputs.plot_type[:3]] - out_file = fname_presuffix(infile, suffix="_%s.png" % plttype, use_ext=False) - outputs["out_file"] = os.path.abspath(out_file) - return outputs - - -# Original source at L108 of /utils/filemanip.py def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): """Manipulates path and name of input filename @@ -68,23 +33,64 @@ def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): >>> fname_presuffix(fname,'pre','post','/tmp') 
     '/tmp/prefoopost.nii.gz'
-    >>> from nipype.interfaces.base import attrs.NOTHING
-    >>> fname_presuffix(fname, 'pre', 'post', attrs.NOTHING) == \
-    fname_presuffix(fname, 'pre', 'post')
+    >>> fname_presuffix(fname, 'pre', 'post', None) == \
+    ...     fname_presuffix(fname, 'pre', 'post')
     True
 
     """
     pth, fname, ext = split_filename(fname)
     if not use_ext:
         ext = ""
-
-    # No need for : bool(attrs.NOTHING is not attrs.NOTHING) evaluates to False
+    # A falsy newpath (None or attrs.NOTHING) leaves the original directory in place
     if newpath:
         pth = op.abspath(newpath)
     return op.join(pth, prefix + fname + suffix + ext)
 
 
-# Original source at L58 of /utils/filemanip.py
+def load_json(filename):
+    """Load data from a json file
+
+    Parameters
+    ----------
+    filename : str
+        Filename to load data from.
+
+    Returns
+    -------
+    data : dict
+
+    """
+    with open(filename) as fp:
+        data = json.load(fp)
+    return data
+
+
+def save_json(filename, data):
+    """Save data to a json file
+
+    Parameters
+    ----------
+    filename : str
+        Filename to save data in.
+    data : dict
+        Dictionary to save in json file.
+
+    """
+    mode = "w"
+    with open(filename, mode) as fp:
+        json.dump(data, fp, sort_keys=True, indent=4)
+
+
+def simplify_list(filelist):
+    """Returns a list if filelist is a list of length greater than 1,
+    otherwise returns the first element
+    """
+    if len(filelist) > 1:
+        return filelist
+    else:
+        return filelist[0]
+
+
 def split_filename(fname):
     """Split a filename into parts: path, base filename and extension. 
@@ -100,7 +106,7 @@ def split_filename(fname):
     fname : str
         filename from fname, without extension
     ext : str
-        file extension from fname
+        file extension from fname
 
     Examples
     --------
@@ -116,12 +122,10 @@ def split_filename(fname):
     '.nii.gz'
 
     """
-    special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"]
-
+    special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"]
     pth = op.dirname(fname)
     fname = op.basename(fname)
-
     ext = None
     for special_ext in special_extensions:
         ext_len = len(special_ext)
@@ -131,5 +135,4 @@
             break
     if not ext:
         fname, ext = op.splitext(fname)
-
     return pth, fname, ext
diff --git a/pydra/tasks/fsl/v6/nipype_ports/utils/misc.py b/pydra/tasks/fsl/v6/nipype_ports/utils/misc.py
new file mode 100644
index 0000000..4dd053b
--- /dev/null
+++ b/pydra/tasks/fsl/v6/nipype_ports/utils/misc.py
@@ -0,0 +1,19 @@
+import logging
+import re
+
+
+logger = logging.getLogger(__name__)
+
+
+def human_order_sorted(l):
+    """Sorts string in human order (i.e. 'stat10' will go after 'stat2')"""
+
+    def atoi(text):
+        return int(text) if text.isdigit() else text
+
+    def natural_keys(text):
+        if isinstance(text, tuple):
+            text = text[0]
+        return [atoi(c) for c in re.split(r"(\d+)", text)]
+
+    return sorted(l, key=natural_keys)
diff --git a/pydra/tasks/fsl/v6/possum/__init__.py b/pydra/tasks/fsl/v6/possum/__init__.py
new file mode 100644
index 0000000..be1c817
--- /dev/null
+++ b/pydra/tasks/fsl/v6/possum/__init__.py
@@ -0,0 +1 @@
+from .b0_calc import B0Calc
diff --git a/pydra/tasks/fsl/v6/possum/b0_calc.py b/pydra/tasks/fsl/v6/possum/b0_calc.py
new file mode 100644
index 0000000..6142163
--- /dev/null
+++ b/pydra/tasks/fsl/v6/possum/b0_calc.py
@@ -0,0 +1,104 @@
+from fileformats.medimage import Nifti1
+import logging
+from pathlib import Path
+from pydra.compose import shell
+import typing as ty
+
+
+logger = logging.getLogger(__name__)
+
+
+@shell.define(
+    xor=[
+        ["x_b0", "xyz_b0"],
+        ["x_b0", "xyz_b0", "y_b0", "z_b0"],
+        ["xyz_b0", "y_b0"],
+        ["xyz_b0", "z_b0"],
+    ]
+)
+class 
B0Calc(shell.Task["B0Calc.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.possum.b0_calc import B0Calc + + >>> task = B0Calc() + >>> task.in_file = Nifti1.mock("tissue+air_map.nii") + >>> task.cmdline + 'b0calc -i tissue+air_map.nii -o tissue+air_map_b0field.nii.gz --chi0=4.000000e-07 -d -9.450000e-06 --extendboundary=1.00 --b0x=0.00 --gx=0.0000 --b0y=0.00 --gy=0.0000 --b0=3.00 --gz=0.0000' + + + """ + + executable = "b0calc" + in_file: Nifti1 = shell.arg( + help="filename of input image (usually a tissue/air segmentation)", + argstr="-i {in_file}", + position=1, + ) + x_grad: float = shell.arg( + help="Value for zeroth-order x-gradient field (per mm)", + argstr="--gx={x_grad:0.4}", + default=0.0, + ) + y_grad: float = shell.arg( + help="Value for zeroth-order y-gradient field (per mm)", + argstr="--gy={y_grad:0.4}", + default=0.0, + ) + z_grad: float = shell.arg( + help="Value for zeroth-order z-gradient field (per mm)", + argstr="--gz={z_grad:0.4}", + default=0.0, + ) + x_b0: float | None = shell.arg( + help="Value for zeroth-order b0 field (x-component), in Tesla", + argstr="--b0x={x_b0:0.2}", + default=0.0, + ) + y_b0: float | None = shell.arg( + help="Value for zeroth-order b0 field (y-component), in Tesla", + argstr="--b0y={y_b0:0.2}", + default=0.0, + ) + z_b0: float | None = shell.arg( + help="Value for zeroth-order b0 field (z-component), in Tesla", + argstr="--b0={z_b0:0.2}", + default=1.0, + ) + xyz_b0: ty.Any | None = shell.arg( + help="Zeroth-order B0 field in Tesla", + argstr="--b0x={xyz_b0[0]:0.2} --b0y={xyz_b0[1]:0.2} --b0={xyz_b0[2]:0.2}", + ) + delta: float = shell.arg( + help="Delta value (chi_tissue - chi_air)", argstr="-d %e", default=-9.45e-06 + ) + chi_air: float = shell.arg( + help="susceptibility of air", argstr="--chi0=%e", default=4e-07 + ) + compute_xyz: bool = shell.arg( + help="calculate and save all 3 field components (i.e. 
x,y,z)", + argstr="--xyz", + default=False, + ) + extendboundary: float = shell.arg( + help="Relative proportion to extend voxels at boundary", + argstr="--extendboundary={extendboundary:0.2}", + default=1.0, + ) + directconv: bool = shell.arg( + help="use direct (image space) convolution, not FFT", + argstr="--directconv", + default=False, + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="filename of B0 output volume", + argstr="-o {out_file}", + path_template="{in_file}_b0field", + position=2, + ) diff --git a/pydra/tasks/fsl/v6/possum/tests/conftest.py b/pydra/tasks/fsl/v6/possum/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/possum/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/possum/tests/test_b0calc.py b/pydra/tasks/fsl/v6/possum/tests/test_b0calc.py new file mode 100644 index 0000000..f1a40c8 --- /dev/null +++ b/pydra/tasks/fsl/v6/possum/tests/test_b0calc.py @@ -0,0 +1,37 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.possum.b0_calc import B0Calc +import pytest + + +logger = logging.getLogger(__name__) + 
+ +@pytest.mark.xfail +def test_b0calc_1(): + task = B0Calc() + task.in_file = Nifti1.sample(seed=0) + task.x_grad = 0.0 + task.y_grad = 0.0 + task.z_grad = 0.0 + task.x_b0 = 0.0 + task.y_b0 = 0.0 + task.z_b0 = 1.0 + task.delta = -9.45e-06 + task.chi_air = 4e-07 + task.compute_xyz = False + task.extendboundary = 1.0 + task.directconv = False + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_b0calc_2(): + task = B0Calc() + task.in_file = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/preprocess/__init__.py b/pydra/tasks/fsl/v6/preprocess/__init__.py new file mode 100644 index 0000000..96e4c78 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/__init__.py @@ -0,0 +1,12 @@ +from .apply_warp import ApplyWarp +from .apply_xfm import ApplyXFM +from .bet import BET +from .fast import FAST +from .first import FIRST +from .flirt import FLIRT +from .fnirt import FNIRT +from .fugue import FUGUE +from .mcflirt import MCFLIRT +from .prelude import PRELUDE +from .slice_timer import SliceTimer +from .susan import SUSAN diff --git a/pydra/tasks/fsl/v6/preprocess/apply_warp.py b/pydra/tasks/fsl/v6/preprocess/apply_warp.py new file mode 100644 index 0000000..22fe167 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/apply_warp.py @@ -0,0 +1,162 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "superlevel": + return argstr.format(**{name: str(value)}) + + return argstr.format(**inputs) + + +def superlevel_formatter(field, inputs): 
+ return _format_arg("superlevel", field, inputs, argstr="--superlevel={superlevel}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )[name] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["abswarp", "relwarp"]]) +class ApplyWarp(shell.Task["ApplyWarp.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.apply_warp import ApplyWarp + + """ + + executable = "applywarp" + in_file: File = shell.arg( + help="image to be warped", argstr="--in={in_file}", position=1 + ) + ref_file: File = shell.arg( + help="reference image", argstr="--ref={ref_file}", position=2 + ) + field_file: File = shell.arg( + help="file containing warp field", argstr="--warp={field_file}" + ) + abswarp: bool = shell.arg( + help="treat warp field as absolute: x' = w(x)", argstr="--abs" + ) + relwarp: bool = shell.arg( + help="treat warp field as relative: x' = x + w(x)", argstr="--rel", position=-1 + ) + datatype: ty.Any = shell.arg( + help="Force output data type [char short int float double].", + argstr="--datatype={datatype}", + ) + supersample: bool = shell.arg( + help="intermediary supersampling of output, default is off", argstr="--super" + ) + superlevel: ty.Any = shell.arg( + help="level of intermediary supersampling, a for 'automatic' or integer level. 
Default = 2", + formatter="superlevel_formatter", + ) + premat: File = shell.arg( + help="filename for pre-transform (affine matrix)", argstr="--premat={premat}" + ) + postmat: File = shell.arg( + help="filename for post-transform (affine matrix)", argstr="--postmat={postmat}" + ) + mask_file: File = shell.arg( + help="filename for mask image (in reference space)", argstr="--mask={mask_file}" + ) + interp: ty.Any = shell.arg( + help="interpolation method", argstr="--interp={interp}", position=-2 + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output filename", + argstr="--out={out_file}", + path_template="out_file", + position=3, + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "applywarp" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix="_warp", output_type=output_type + ) + else: + outputs["out_file"] = os.path.abspath(out_file) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/apply_xfm.py b/pydra/tasks/fsl/v6/preprocess/apply_xfm.py new file mode 100644 index 0000000..8280830 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/apply_xfm.py @@ -0,0 +1,195 @@ +from fileformats.generic import File +import logging +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + + if skip is None: + skip = [] + if inputs["save_log"] and not inputs["verbose"]: + inputs["verbose"] = 1 + if inputs["apply_xfm"] and not (inputs["in_matrix_file"] or inputs["uses_qform"]): + raise RuntimeError( + "Argument apply_xfm requires in_matrix_file or " + "uses_qform arguments to run" + ) + skip.append("save_log") + + return parsed_inputs + + +@shell.define(xor=[["apply_isoxfm", "apply_xfm"]]) +class ApplyXFM(shell.Task["ApplyXFM.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.apply_xfm import ApplyXFM + + """ + + executable = "flirt" + apply_xfm: bool = shell.arg( + help="apply transformation supplied by in_matrix_file or 
uses_qform to use the affine matrix stored in the reference header", + argstr="-applyxfm", + default=True, + ) + in_file: File = shell.arg(help="input file", argstr="-in {in_file}", position=1) + reference: File = shell.arg( + help="reference file", argstr="-ref {reference}", position=2 + ) + in_matrix_file: File = shell.arg( + help="input 4x4 affine matrix", argstr="-init {in_matrix_file}" + ) + apply_isoxfm: float | None = shell.arg( + help="as applyxfm but forces isotropic resampling", + argstr="-applyisoxfm {apply_isoxfm}", + ) + datatype: ty.Any = shell.arg( + help="force output data type", argstr="-datatype {datatype}" + ) + cost: ty.Any = shell.arg(help="cost function", argstr="-cost {cost}") + cost_func: ty.Any = shell.arg( + help="cost function", argstr="-searchcost {cost_func}" + ) + uses_qform: bool = shell.arg( + help="initialize using sform or qform", argstr="-usesqform" + ) + display_init: bool = shell.arg(help="display initial matrix", argstr="-displayinit") + angle_rep: ty.Any = shell.arg( + help="representation of rotation angles", argstr="-anglerep {angle_rep}" + ) + interp: ty.Any = shell.arg( + help="final interpolation method used in reslicing", argstr="-interp {interp}" + ) + sinc_width: int = shell.arg( + help="full-width in voxels", argstr="-sincwidth {sinc_width}" + ) + sinc_window: ty.Any = shell.arg( + help="sinc window", argstr="-sincwindow {sinc_window}" + ) + bins: int = shell.arg(help="number of histogram bins", argstr="-bins {bins}") + dof: int = shell.arg( + help="number of transform degrees of freedom", argstr="-dof {dof}" + ) + no_resample: bool = shell.arg( + help="do not change input sampling", argstr="-noresample" + ) + force_scaling: bool = shell.arg( + help="force rescaling even for low-res images", argstr="-forcescaling" + ) + min_sampling: float = shell.arg( + help="set minimum voxel dimension for sampling", + argstr="-minsampling {min_sampling}", + ) + padding_size: int = shell.arg( + help="for applyxfm: interpolates 
outside image by size", + argstr="-paddingsize {padding_size}", + ) + searchr_x: list[int] = shell.arg( + help="search angles along x-axis, in degrees", argstr="-searchrx {searchr_x}" + ) + searchr_y: list[int] = shell.arg( + help="search angles along y-axis, in degrees", argstr="-searchry {searchr_y}" + ) + searchr_z: list[int] = shell.arg( + help="search angles along z-axis, in degrees", argstr="-searchrz {searchr_z}" + ) + no_search: bool = shell.arg( + help="set all angular searches to ranges 0 to 0", argstr="-nosearch" + ) + coarse_search: int = shell.arg( + help="coarse search delta angle", argstr="-coarsesearch {coarse_search}" + ) + fine_search: int = shell.arg( + help="fine search delta angle", argstr="-finesearch {fine_search}" + ) + schedule: File = shell.arg( + help="replaces default schedule", argstr="-schedule {schedule}" + ) + ref_weight: File = shell.arg( + help="File for reference weighting volume", argstr="-refweight {ref_weight}" + ) + in_weight: File = shell.arg( + help="File for input weighting volume", argstr="-inweight {in_weight}" + ) + no_clamp: bool = shell.arg(help="do not use intensity clamping", argstr="-noclamp") + no_resample_blur: bool = shell.arg( + help="do not use blurring on downsampling", argstr="-noresampblur" + ) + rigid2D: bool = shell.arg(help="use 2D rigid body mode - ignores dof", argstr="-2D") + save_log: bool = shell.arg(help="save to log file") + verbose: int = shell.arg( + help="verbose mode, 0 is least", argstr="-verbose {verbose}" + ) + bgvalue: float = shell.arg( + help="use specified background value for points outside FOV", + argstr="-setbackground {bgvalue}", + ) + wm_seg: File = shell.arg( + help="white matter segmentation volume needed by BBR cost function", + argstr="-wmseg {wm_seg}", + ) + wmcoords: File = shell.arg( + help="white matter boundary coordinates for BBR cost function", + argstr="-wmcoords {wmcoords}", + ) + wmnorms: File = shell.arg( + help="white matter boundary normals for BBR cost function", + 
argstr="-wmnorms {wmnorms}", + ) + fieldmap: File = shell.arg( + help="fieldmap image in rads/s - must be already registered to the reference image", + argstr="-fieldmap {fieldmap}", + ) + fieldmapmask: File = shell.arg( + help="mask for fieldmap image", argstr="-fieldmapmask {fieldmapmask}" + ) + pedir: int = shell.arg( + help="phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z", + argstr="-pedir {pedir}", + ) + echospacing: float = shell.arg( + help="value of EPI echo spacing - units of seconds", + argstr="-echospacing {echospacing}", + ) + bbrtype: ty.Any = shell.arg( + help="type of bbr cost function: signed [default], global_abs, local_abs", + argstr="-bbrtype {bbrtype}", + ) + bbrslope: float = shell.arg( + help="value of bbr slope", argstr="-bbrslope {bbrslope}" + ) + + class Outputs(shell.Outputs): + out_file: File = shell.outarg( + help="registered output file", + argstr="-out {out_file}", + path_template="{in_file}_flirt", + position=3, + ) + out_matrix_file: File = shell.outarg( + help="output affine matrix in 4x4 asciii format", + argstr="-omat {out_matrix_file}", + path_template="{in_file}_flirt.mat", + position=4, + ) + out_log: File | None = shell.outarg( + help="output log", + requires=["save_log"], + path_template="{in_file}_flirt.log", + ) diff --git a/pydra/tasks/fsl/v6/preprocess/bet.py b/pydra/tasks/fsl/v6/preprocess/bet.py new file mode 100644 index 0000000..c01f6c7 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/bet.py @@ -0,0 +1,460 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +import os.path as op +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + formatted = argstr.format(**inputs) + if name == "in_file": + + 
return op.relpath(formatted, start=os.getcwd()) + return formatted + + return argstr.format(**inputs) + + +def in_file_formatter(field, inputs): + return _format_arg("in_file", field, inputs, argstr="{in_file}") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["out_file"] = os.path.abspath( + _gen_outfilename( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + ) + + basename = os.path.basename(outputs["out_file"]) + cwd = os.path.dirname(outputs["out_file"]) + kwargs = {"basename": basename, "cwd": cwd} + + if ((inputs["mesh"] is not attrs.NOTHING) and inputs["mesh"]) or ( + (inputs["surfaces"] is not attrs.NOTHING) and inputs["surfaces"] + ): + outputs["meshfile"] = _gen_fname( + suffix="_mesh.vtk", + change_ext=False, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + if ((inputs["mask"] is not attrs.NOTHING) and inputs["mask"]) or ( + (inputs["reduce_bias"] is not attrs.NOTHING) and inputs["reduce_bias"] + ): + outputs["mask_file"] = _gen_fname( + suffix="_mask", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + if (inputs["outline"] is not attrs.NOTHING) and inputs["outline"]: + outputs["outline_file"] = _gen_fname( + suffix="_overlay", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + if (inputs["surfaces"] is not attrs.NOTHING) and inputs["surfaces"]: + outputs["inskull_mask_file"] = _gen_fname( + suffix="_inskull_mask", + 
output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["inskull_mesh_file"] = _gen_fname( + suffix="_inskull_mesh", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["outskull_mask_file"] = _gen_fname( + suffix="_outskull_mask", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["outskull_mesh_file"] = _gen_fname( + suffix="_outskull_mesh", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["outskin_mask_file"] = _gen_fname( + suffix="_outskin_mask", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["outskin_mesh_file"] = _gen_fname( + suffix="_outskin_mesh", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + outputs["skull_mask_file"] = _gen_fname( + suffix="_skull_mask", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + if (inputs["skull"] is not attrs.NOTHING) and inputs["skull"]: + outputs["skull_file"] = _gen_fname( + suffix="_skull", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **kwargs, + ) + if (inputs["no_output"] is not attrs.NOTHING) and inputs["no_output"]: + outputs["out_file"] = 
attrs.NOTHING  # sentinel meaning "no output generated" (was `type(attrs.NOTHING)`, the _Nothing class — breaks `is attrs.NOTHING` checks)
+    return outputs
+
+
+def mask_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("mask_file")
+
+
+def outline_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("outline_file")
+
+
+def meshfile_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("meshfile")
+
+
+def inskull_mask_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("inskull_mask_file")
+
+
+def inskull_mesh_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("inskull_mesh_file")
+
+
+def outskull_mask_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("outskull_mask_file")
+
+
+def outskull_mesh_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("outskull_mesh_file")
+
+
+def outskin_mask_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("outskin_mask_file")
+
+
+def outskin_mesh_file_callable(output_dir, inputs, stdout, stderr):
+    outputs = _list_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("outskin_mesh_file")
+
+
+def skull_mask_file_callable(output_dir, inputs, stdout, 
stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("skull_mask_file") + + +def skull_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("skull_file") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _gen_outfilename( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + ) + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define( + xor=[ + [ + "functional", + "padding", + "reduce_bias", + "remove_eyes", + "robust", + "surfaces", + "t2_guided", + ] + ] +) +class BET(shell.Task["BET.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.bet import BET + + >>> task = BET() + >>> task.in_file = Nifti1.mock("structural.nii") + >>> task.out_file = "brain_anat.nii" + >>> task.t2_guided = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "bet" + in_file: Nifti1 = shell.arg( + help="input file to skull strip", formatter="in_file_formatter", position=1 + ) + outline: bool = shell.arg(help="create surface outline image", argstr="-o") + mask: bool = shell.arg(help="create binary mask image", argstr="-m") + skull: bool = shell.arg(help="create skull image", argstr="-s") + no_output: bool = shell.arg(help="Don't generate segmented output", argstr="-n") + frac: float = shell.arg( + help="fractional intensity threshold", argstr="-f {frac:.2}" + ) + vertical_gradient: float = shell.arg( + help="vertical gradient in fractional intensity threshold (-1, 1)", + argstr="-g {vertical_gradient:.2}", + ) + radius: int = shell.arg(help="head radius", argstr="-r {radius}") + center: list[int] = shell.arg( + 
help="center of gravity in voxels", argstr="-c {center}" + ) + threshold: bool = shell.arg( + help="apply thresholding to segmented brain image and mask", argstr="-t" + ) + mesh: bool = shell.arg(help="generate a vtk mesh brain surface", argstr="-e") + robust: bool = shell.arg( + help="robust brain centre estimation (iterates BET several times)", argstr="-R" + ) + padding: bool = shell.arg( + help="improve BET if FOV is very small in Z (by temporarily padding end slices)", + argstr="-Z", + ) + remove_eyes: bool = shell.arg( + help="eye & optic nerve cleanup (can be useful in SIENA)", argstr="-S" + ) + surfaces: bool = shell.arg( + help="run bet2 and then betsurf to get additional skull and scalp surfaces (includes registrations)", + argstr="-A", + ) + t2_guided: File | None = shell.arg( + help="as with creating surfaces, when also feeding in non-brain-extracted T2 (includes registrations)", + argstr="-A2 {t2_guided}", + ) + functional: bool = shell.arg(help="apply to 4D fMRI data", argstr="-F") + reduce_bias: bool = shell.arg(help="bias field and neck cleanup", argstr="-B") + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="name of output skull stripped image", + argstr="{out_file}", + path_template='"brain_anat.nii"', + position=2, + ) + mask_file: File | None = shell.out( + help="path/name of binary brain mask (if generated)", + callable=mask_file_callable, + ) + outline_file: File | None = shell.out( + help="path/name of outline file (if generated)", + callable=outline_file_callable, + ) + meshfile: File | None = shell.out( + help="path/name of vtk mesh file (if generated)", callable=meshfile_callable + ) + inskull_mask_file: File | None = shell.out( + help="path/name of inskull mask (if generated)", + callable=inskull_mask_file_callable, + ) + inskull_mesh_file: File | None = shell.out( + help="path/name of inskull mesh outline (if generated)", + callable=inskull_mesh_file_callable, + ) + outskull_mask_file: File | None = shell.out( + 
help="path/name of outskull mask (if generated)", + callable=outskull_mask_file_callable, + ) + outskull_mesh_file: File | None = shell.out( + help="path/name of outskull mesh outline (if generated)", + callable=outskull_mesh_file_callable, + ) + outskin_mask_file: File | None = shell.out( + help="path/name of outskin mask (if generated)", + callable=outskin_mask_file_callable, + ) + outskin_mesh_file: File | None = shell.out( + help="path/name of outskin mesh outline (if generated)", + callable=outskin_mesh_file_callable, + ) + skull_mask_file: File | None = shell.out( + help="path/name of skull mask (if generated)", + callable=skull_mask_file_callable, + ) + skull_file: File | None = shell.out( + help="path/name of skull file (if generated)", callable=skull_file_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "bet" + msg += "basename is not set!" 
+        raise ValueError(msg)
+    if cwd is None:
+        cwd = output_dir
+    if ext is None:
+        ext = Info.output_type_to_ext(output_type)
+    if change_ext:
+        if suffix:
+            suffix = f"{suffix}{ext}"
+        else:
+            suffix = ext
+    if suffix is None:
+        suffix = ""
+    fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)
+    return fname
+
+
+def _gen_outfilename(
+    in_file=None,
+    out_file=None,
+    output_type=None,
+    inputs=None,
+    output_dir=None,
+    stderr=None,
+    stdout=None,
+):
+    # NOTE(review): restored nipype BET semantics — the relpath conversion applies
+    # only to an auto-generated name; an explicit out_file is returned unchanged.
+    if (out_file is attrs.NOTHING) and (in_file is not attrs.NOTHING):
+        out_file = _gen_fname(in_file, suffix="_brain", output_type=output_type)
+        out_file = op.relpath(out_file, start=output_dir)
+
+    return out_file
+
+
+IFLOGGER = logging.getLogger("nipype.interface")
diff --git a/pydra/tasks/fsl/v6/preprocess/fast.py b/pydra/tasks/fsl/v6/preprocess/fast.py
new file mode 100644
index 0000000..390f8f0
--- /dev/null
+++ b/pydra/tasks/fsl/v6/preprocess/fast.py
@@ -0,0 +1,414 @@
+import attrs
+from fileformats.generic import File
+from fileformats.medimage import Nifti1
+import logging
+from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import (
+    fname_presuffix,
+    split_filename,
+)
+import os
+from pathlib import Path
+from pathlib import Path
+from pydra.compose import shell
+import typing as ty
+
+
+logger = logging.getLogger(__name__)
+
+
+def _format_arg(name, value, inputs, argstr):
+    if value is None:
+        return ""
+
+    formatted = argstr.format(**inputs)
+    if name == "in_files":
+
+        formatted = "-S %d %s" % (len(value), formatted)
+        return formatted
+
+    return argstr.format(**inputs)
+
+
+def in_files_formatter(field, inputs):
+    return _format_arg("in_files", field, inputs, argstr="{in_files}")
+
+
+def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+    inputs = attrs.asdict(inputs)
+
+    outputs = {}
+    if inputs["number_classes"] is attrs.NOTHING:
+        nclasses = 3
+    else:
+        nclasses = inputs["number_classes"]
+
+    _gen_fname_opts = {}
+    if 
inputs["out_basename"] is not attrs.NOTHING: + _gen_fname_opts["basename"] = inputs["out_basename"] + _gen_fname_opts["cwd"] = os.getcwd() + else: + _gen_fname_opts["basename"] = inputs["in_files"][-1] + _gen_fname_opts["cwd"], _, _ = split_filename(_gen_fname_opts["basename"]) + + outputs["tissue_class_map"] = _gen_fname( + suffix="_seg", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + if inputs["segments"]: + outputs["tissue_class_files"] = [] + for i in range(nclasses): + outputs["tissue_class_files"].append( + _gen_fname( + suffix="_seg_%d" % i, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + if inputs["output_biascorrected"] is not attrs.NOTHING: + outputs["restored_image"] = [] + if len(inputs["in_files"]) > 1: + + for val, f in enumerate(inputs["in_files"]): + + outputs["restored_image"].append( + _gen_fname( + suffix="_restore_%d" % (val + 1), + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + else: + + outputs["restored_image"].append( + _gen_fname( + suffix="_restore", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + + outputs["mixeltype"] = _gen_fname( + suffix="_mixeltype", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + if not inputs["no_pve"]: + outputs["partial_volume_map"] = _gen_fname( + suffix="_pveseg", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + 
output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + outputs["partial_volume_files"] = [] + for i in range(nclasses): + outputs["partial_volume_files"].append( + _gen_fname( + suffix="_pve_%d" % i, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + if inputs["output_biasfield"]: + outputs["bias_field"] = [] + if len(inputs["in_files"]) > 1: + + for val, f in enumerate(inputs["in_files"]): + + outputs["bias_field"].append( + _gen_fname( + suffix="_bias_%d" % (val + 1), + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + else: + + outputs["bias_field"].append( + _gen_fname( + suffix="_bias", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + + if inputs["probability_maps"]: + outputs["probability_maps"] = [] + for i in range(nclasses): + outputs["probability_maps"].append( + _gen_fname( + suffix="_prob_%d" % i, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + **_gen_fname_opts, + ) + ) + return outputs + + +def tissue_class_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tissue_class_map") + + +def tissue_class_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("tissue_class_files") + + +def restored_image_callable(output_dir, inputs, stdout, stderr): + outputs = 
_list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("restored_image") + + +def mixeltype_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mixeltype") + + +def partial_volume_map_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("partial_volume_map") + + +def partial_volume_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("partial_volume_files") + + +def bias_field_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("bias_field") + + +def probability_maps_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("probability_maps") + + +@shell.define +class FAST(shell.Task["FAST.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.fast import FAST + + >>> task = FAST() + >>> task.in_files = [Nifti1.mock("s"), Nifti1.mock("t"), Nifti1.mock("r"), Nifti1.mock("u"), Nifti1.mock("c"), Nifti1.mock("t"), Nifti1.mock("u"), Nifti1.mock("r"), Nifti1.mock("a"), Nifti1.mock("l"), Nifti1.mock("."), Nifti1.mock("n"), Nifti1.mock("i"), Nifti1.mock("i")] + >>> task.init_transform = File.mock() + >>> task.manual_seg = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "fast" + in_files: list[Nifti1] = shell.arg( + help="image, or multi-channel set of images, to be segmented", + position=-1, + 
formatter="in_files_formatter", + ) + out_basename: Path = shell.arg( + help="base name of output files", argstr="-o {out_basename}" + ) + number_classes: ty.Any = shell.arg( + help="number of tissue-type classes", argstr="-n {number_classes}" + ) + output_biasfield: bool = shell.arg(help="output estimated bias field", argstr="-b") + output_biascorrected: bool = shell.arg( + help="output restored image (bias-corrected image)", argstr="-B" + ) + img_type: ty.Any = shell.arg( + help="int specifying type of image: (1 = T1, 2 = T2, 3 = PD)", + argstr="-t {img_type}", + ) + bias_iters: ty.Any = shell.arg( + help="number of main-loop iterations during bias-field removal", + argstr="-I {bias_iters}", + ) + bias_lowpass: ty.Any = shell.arg( + help="bias field smoothing extent (FWHM) in mm", argstr="-l {bias_lowpass}" + ) + init_seg_smooth: ty.Any = shell.arg( + help="initial segmentation spatial smoothness (during bias field estimation)", + argstr="-f {init_seg_smooth:.3}", + ) + segments: bool = shell.arg( + help="outputs a separate binary image for each tissue type", argstr="-g" + ) + init_transform: File = shell.arg( + help=" initialise using priors", + argstr="-a {init_transform}", + ) + other_priors: list[File] = shell.arg( + help="alternative prior images", argstr="-A {other_priors}" + ) + no_pve: bool = shell.arg( + help="turn off PVE (partial volume estimation)", argstr="--nopve" + ) + no_bias: bool = shell.arg(help="do not remove bias field", argstr="-N") + use_priors: bool = shell.arg(help="use priors throughout", argstr="-P") + segment_iters: ty.Any = shell.arg( + help="number of segmentation-initialisation iterations", + argstr="-W {segment_iters}", + ) + mixel_smooth: ty.Any = shell.arg( + help="spatial smoothness for mixeltype", argstr="-R {mixel_smooth:.2}" + ) + iters_afterbias: ty.Any = shell.arg( + help="number of main-loop iterations after bias-field removal", + argstr="-O {iters_afterbias}", + ) + hyper: ty.Any = shell.arg( + help="segmentation spatial 
smoothness", argstr="-H {hyper:.2}" + ) + verbose: bool = shell.arg(help="switch on diagnostic messages", argstr="-v") + manual_seg: File = shell.arg( + help="Filename containing intensities", argstr="-s {manual_seg}" + ) + probability_maps: bool = shell.arg( + help="outputs individual probability maps", argstr="-p" + ) + + class Outputs(shell.Outputs): + tissue_class_map: File | None = shell.out( + help="path/name of binary segmented volume file one val for each class _seg", + callable=tissue_class_map_callable, + ) + tissue_class_files: list[File] | None = shell.out( + callable=tissue_class_files_callable + ) + restored_image: list[File] | None = shell.out(callable=restored_image_callable) + mixeltype: File | None = shell.out( + help="path/name of mixeltype volume file _mixeltype", + callable=mixeltype_callable, + ) + partial_volume_map: File | None = shell.out( + help="path/name of partial volume file _pveseg", + callable=partial_volume_map_callable, + ) + partial_volume_files: list[File] | None = shell.out( + callable=partial_volume_files_callable + ) + bias_field: list[File] | None = shell.out(callable=bias_field_callable) + probability_maps: list[File] | None = shell.out( + callable=probability_maps_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fast" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/first.py b/pydra/tasks/fsl/v6/preprocess/first.py new file mode 100644 index 0000000..fa6b980 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/first.py @@ -0,0 +1,238 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import split_filename +import os.path as op +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + + if inputs["list_of_specific_structures"] is not attrs.NOTHING: + structures = inputs["list_of_specific_structures"] + else: + structures = [ + "L_Hipp", + "R_Hipp", + "L_Accu", + "R_Accu", + "L_Amyg", + "R_Amyg", + "L_Caud", + "R_Caud", + "L_Pall", + "R_Pall", + "L_Puta", + "R_Puta", + "L_Thal", + "R_Thal", + "BrStem", + ] + outputs["original_segmentations"] = _gen_fname( + "original_segmentations", + list_of_specific_structures=inputs["list_of_specific_structures"], + method=inputs["method"], + method_as_numerical_threshold=inputs["method_as_numerical_threshold"], + out_file=inputs["out_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + 
outputs["segmentation_file"] = _gen_fname( + "segmentation_file", + list_of_specific_structures=inputs["list_of_specific_structures"], + method=inputs["method"], + method_as_numerical_threshold=inputs["method_as_numerical_threshold"], + out_file=inputs["out_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["vtk_surfaces"] = _gen_mesh_names( + "vtk_surfaces", + structures, + out_file=inputs["out_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["bvars"] = _gen_mesh_names( + "bvars", + structures, + out_file=inputs["out_file"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + return outputs + + +def vtk_surfaces_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("vtk_surfaces") + + +def bvars_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("bvars") + + +def original_segmentations_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("original_segmentations") + + +def segmentation_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("segmentation_file") + + +@shell.define(xor=[["method", "method_as_numerical_threshold"]]) +class FIRST(shell.Task["FIRST.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.first import FIRST + + """ + + executable = "run_first_all" + 
in_file: File = shell.arg( + help="input data file", argstr="-i {in_file}", position=-2 + ) + out_file: Path | None = shell.arg( + help="output data file", + argstr="-o {out_file}", + position=-1, + default="segmented", + ) + verbose: bool = shell.arg(help="Use verbose logging.", argstr="-v", position=1) + brain_extracted: bool = shell.arg( + help="Input structural image is already brain-extracted", + argstr="-b", + position=2, + ) + no_cleanup: bool = shell.arg( + help="Input structural image is already brain-extracted", + argstr="-d", + position=3, + ) + method: ty.Any | None = shell.arg( + help="Method must be one of auto, fast, none, or it can be entered using the 'method_as_numerical_threshold' input", + argstr="-m {method}", + position=4, + default="auto", + ) + method_as_numerical_threshold: float | None = shell.arg( + help="Specify a numerical threshold value or use the 'method' input to choose auto, fast, or none", + argstr="-m {method_as_numerical_threshold:.4}", + position=4, + ) + list_of_specific_structures: list[str] = shell.arg( + help="Runs only on the specified structures (e.g. L_Hipp, R_HippL_Accu, R_Accu, L_Amyg, R_AmygL_Caud, R_Caud, L_Pall, R_PallL_Puta, R_Puta, L_Thal, R_Thal, BrStem", + argstr="-s {list_of_specific_structures}", + position=5, + sep=",", + ) + affine_file: File = shell.arg( + help="Affine matrix to use (e.g. img2std.mat) (does not re-run registration)", + argstr="-a {affine_file}", + position=6, + ) + + class Outputs(shell.Outputs): + vtk_surfaces: list[File] | None = shell.out( + help="VTK format meshes for each subcortical region", + callable=vtk_surfaces_callable, + ) + bvars: list[File] | None = shell.out( + help="bvars for each subcortical region", callable=bvars_callable + ) + original_segmentations: File | None = shell.out( + help="3D image file containing the segmented regions as integer values. 
Uses CMA labelling", + callable=original_segmentations_callable, + ) + segmentation_file: File | None = shell.out( + help="4D image file containing a single volume per segmented region", + callable=segmentation_file_callable, + ) + + +def _gen_fname( + basename, + list_of_specific_structures=None, + method=None, + method_as_numerical_threshold=None, + out_file=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + path, outname, ext = split_filename(out_file) + + method = "none" + if (method is not attrs.NOTHING) and method != "none": + method = "fast" + if list_of_specific_structures and method == "auto": + method = "none" + + if method_as_numerical_threshold is not attrs.NOTHING: + thres = "%.4f" % method_as_numerical_threshold + method = thres.replace(".", "") + + if basename == "original_segmentations": + return op.abspath(f"{outname}_all_{method}_origsegs.nii.gz") + if basename == "segmentation_file": + return op.abspath(f"{outname}_all_{method}_firstseg.nii.gz") + + return None + + +def _gen_mesh_names( + name, + structures, + out_file=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + path, prefix, ext = split_filename(out_file) + if name == "vtk_surfaces": + vtks = list() + for struct in structures: + vtk = prefix + "-" + struct + "_first.vtk" + vtks.append(op.abspath(vtk)) + return vtks + if name == "bvars": + bvars = list() + for struct in structures: + bvar = prefix + "-" + struct + "_first.bvars" + bvars.append(op.abspath(bvar)) + return bvars + return None diff --git a/pydra/tasks/fsl/v6/preprocess/flirt.py b/pydra/tasks/fsl/v6/preprocess/flirt.py new file mode 100644 index 0000000..2cc8435 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/flirt.py @@ -0,0 +1,214 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = 
logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + + if skip is None: + skip = [] + if inputs["save_log"] and not inputs["verbose"]: + inputs["verbose"] = 1 + if inputs["apply_xfm"] and not (inputs["in_matrix_file"] or inputs["uses_qform"]): + raise RuntimeError( + "Argument apply_xfm requires in_matrix_file or " + "uses_qform arguments to run" + ) + skip.append("save_log") + + return parsed_inputs + + +@shell.define(xor=[["apply_isoxfm", "apply_xfm"]]) +class FLIRT(shell.Task["FLIRT.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.flirt import FLIRT + + >>> task = FLIRT() + >>> task.in_file = Nifti1.mock("structural.nii") + >>> task.reference = File.mock() + >>> task.in_matrix_file = File.mock() + >>> task.cost_func = "mutualinfo" + >>> task.bins = 640 + >>> task.schedule = File.mock() + >>> task.ref_weight = File.mock() + >>> task.in_weight = File.mock() + >>> task.wm_seg = File.mock() + >>> task.wmcoords = File.mock() + >>> task.wmnorms = File.mock() + >>> task.fieldmap = File.mock() + >>> task.fieldmapmask = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "flirt" + in_file: Nifti1 = shell.arg(help="input file", argstr="-in {in_file}", position=1) + reference: File = shell.arg( + help="reference file", argstr="-ref {reference}", position=2 + ) + in_matrix_file: File = shell.arg( + help="input 4x4 affine matrix", argstr="-init {in_matrix_file}" + ) + apply_xfm: bool = shell.arg( + help="apply transformation supplied by in_matrix_file or uses_qform to use the affine matrix stored in the reference header", + argstr="-applyxfm", + ) + apply_isoxfm: float | None = shell.arg( + help="as applyxfm but forces isotropic resampling", + argstr="-applyisoxfm {apply_isoxfm}", + ) + datatype: ty.Any = 
shell.arg( + help="force output data type", argstr="-datatype {datatype}" + ) + cost: ty.Any = shell.arg(help="cost function", argstr="-cost {cost}") + cost_func: ty.Any = shell.arg( + help="cost function", argstr="-searchcost {cost_func}" + ) + uses_qform: bool = shell.arg( + help="initialize using sform or qform", argstr="-usesqform" + ) + display_init: bool = shell.arg(help="display initial matrix", argstr="-displayinit") + angle_rep: ty.Any = shell.arg( + help="representation of rotation angles", argstr="-anglerep {angle_rep}" + ) + interp: ty.Any = shell.arg( + help="final interpolation method used in reslicing", argstr="-interp {interp}" + ) + sinc_width: int = shell.arg( + help="full-width in voxels", argstr="-sincwidth {sinc_width}" + ) + sinc_window: ty.Any = shell.arg( + help="sinc window", argstr="-sincwindow {sinc_window}" + ) + bins: int = shell.arg(help="number of histogram bins", argstr="-bins {bins}") + dof: int = shell.arg( + help="number of transform degrees of freedom", argstr="-dof {dof}" + ) + no_resample: bool = shell.arg( + help="do not change input sampling", argstr="-noresample" + ) + force_scaling: bool = shell.arg( + help="force rescaling even for low-res images", argstr="-forcescaling" + ) + min_sampling: float = shell.arg( + help="set minimum voxel dimension for sampling", + argstr="-minsampling {min_sampling}", + ) + padding_size: int = shell.arg( + help="for applyxfm: interpolates outside image by size", + argstr="-paddingsize {padding_size}", + ) + searchr_x: list[int] = shell.arg( + help="search angles along x-axis, in degrees", argstr="-searchrx {searchr_x}" + ) + searchr_y: list[int] = shell.arg( + help="search angles along y-axis, in degrees", argstr="-searchry {searchr_y}" + ) + searchr_z: list[int] = shell.arg( + help="search angles along z-axis, in degrees", argstr="-searchrz {searchr_z}" + ) + no_search: bool = shell.arg( + help="set all angular searches to ranges 0 to 0", argstr="-nosearch" + ) + coarse_search: int = 
shell.arg( + help="coarse search delta angle", argstr="-coarsesearch {coarse_search}" + ) + fine_search: int = shell.arg( + help="fine search delta angle", argstr="-finesearch {fine_search}" + ) + schedule: File = shell.arg( + help="replaces default schedule", argstr="-schedule {schedule}" + ) + ref_weight: File = shell.arg( + help="File for reference weighting volume", argstr="-refweight {ref_weight}" + ) + in_weight: File = shell.arg( + help="File for input weighting volume", argstr="-inweight {in_weight}" + ) + no_clamp: bool = shell.arg(help="do not use intensity clamping", argstr="-noclamp") + no_resample_blur: bool = shell.arg( + help="do not use blurring on downsampling", argstr="-noresampblur" + ) + rigid2D: bool = shell.arg(help="use 2D rigid body mode - ignores dof", argstr="-2D") + save_log: bool = shell.arg(help="save to log file") + verbose: int = shell.arg( + help="verbose mode, 0 is least", argstr="-verbose {verbose}" + ) + bgvalue: float = shell.arg( + help="use specified background value for points outside FOV", + argstr="-setbackground {bgvalue}", + ) + wm_seg: File = shell.arg( + help="white matter segmentation volume needed by BBR cost function", + argstr="-wmseg {wm_seg}", + ) + wmcoords: File = shell.arg( + help="white matter boundary coordinates for BBR cost function", + argstr="-wmcoords {wmcoords}", + ) + wmnorms: File = shell.arg( + help="white matter boundary normals for BBR cost function", + argstr="-wmnorms {wmnorms}", + ) + fieldmap: File = shell.arg( + help="fieldmap image in rads/s - must be already registered to the reference image", + argstr="-fieldmap {fieldmap}", + ) + fieldmapmask: File = shell.arg( + help="mask for fieldmap image", argstr="-fieldmapmask {fieldmapmask}" + ) + pedir: int = shell.arg( + help="phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z", + argstr="-pedir {pedir}", + ) + echospacing: float = shell.arg( + help="value of EPI echo spacing - units of seconds", + argstr="-echospacing {echospacing}", 
+ ) + bbrtype: ty.Any = shell.arg( + help="type of bbr cost function: signed [default], global_abs, local_abs", + argstr="-bbrtype {bbrtype}", + ) + bbrslope: float = shell.arg( + help="value of bbr slope", argstr="-bbrslope {bbrslope}" + ) + + class Outputs(shell.Outputs): + out_file: File = shell.outarg( + help="registered output file", + argstr="-out {out_file}", + path_template="{in_file}_flirt", + position=3, + ) + out_matrix_file: File = shell.outarg( + help="output affine matrix in 4x4 ascii format", + argstr="-omat {out_matrix_file}", + path_template="{in_file}_flirt.mat", + position=4, + ) + out_log: File | None = shell.outarg( + help="output log", + requires=["save_log"], + path_template="{in_file}_flirt.log", + ) diff --git a/pydra/tasks/fsl/v6/preprocess/fnirt.py b/pydra/tasks/fsl/v6/preprocess/fnirt.py new file mode 100644 index 0000000..6752983 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/fnirt.py @@ -0,0 +1,421 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name in ("in_intensitymap_file", "out_intensitymap_file"): + if name == "out_intensitymap_file": + value = _list_outputs( + in_file=inputs["in_file"], output_type=inputs["output_type"] + )[name] + value = [FNIRT.intensitymap_file_basename(v) for v in value] + assert len(set(value)) == 1, "Found different basenames for {}: {}".format( + name, value + ) + return argstr.format(**{name: value[0]}) + if name in list(parsed_inputs["filemap"].keys()): + return argstr.format( + **{ + name: _list_outputs( + in_file=inputs["in_file"], output_type=inputs["output_type"] + )[name] + } + ) + + return argstr.format(**inputs) + + +def 
out_intensitymap_file_formatter(field, inputs): + return _format_arg( + "out_intensitymap_file", + field, + inputs, + argstr="--intout={out_intensitymap_file}", + ) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + self_dict = {} + + outputs = {} + for key, suffix in list(parsed_inputs["filemap"].items()): + inval = getattr(self_dict["inputs"], key) + change_ext = True + if key in ["warped_file", "log_file"]: + if suffix.endswith(".txt"): + change_ext = False + if inval is not attrs.NOTHING: + outputs[key] = os.path.abspath(inval) + else: + outputs[key] = _gen_fname( + inputs["in_file"], + suffix="_" + suffix, + change_ext=change_ext, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + elif inval is not attrs.NOTHING: + if isinstance(inval, bool): + if inval: + outputs[key] = _gen_fname( + inputs["in_file"], + suffix="_" + suffix, + change_ext=change_ext, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + outputs[key] = os.path.abspath(inval) + + if key == "out_intensitymap_file" and (outputs[key] is not attrs.NOTHING): + basename = FNIRT.intensitymap_file_basename(outputs[key]) + outputs[key] = [outputs[key], "%s.txt" % basename] + return outputs + + +def fieldcoeff_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("fieldcoeff_file") + + +def field_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("field_file") + + +def jacobian_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, 
inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("jacobian_file") + + +def modulatedref_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("modulatedref_file") + + +def out_intensitymap_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_intensitymap_file") + + +def _gen_filename(name, inputs): + if name in ["warped_file", "log_file"]: + return _list_outputs( + in_file=inputs["in_file"], output_type=inputs["output_type"] + )[name] + return None + + +def log_file_default(inputs): + return _gen_filename("log_file", inputs=inputs) + + +def warped_file_default(inputs): + return _gen_filename("warped_file", inputs=inputs) + + +@shell.define( + xor=[ + ["apply_inmask", "skip_inmask"], + ["apply_intensity_mapping", "skip_intensity_mapping"], + ["apply_refmask", "skip_refmask"], + ] +) +class FNIRT(shell.Task["FNIRT.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.fnirt import FNIRT + + >>> task = FNIRT() + >>> task.ref_file = File.mock() + >>> task.in_file = File.mock() + >>> task.affine_file = File.mock() + >>> task.inwarp_file = File.mock() + >>> task.refmask_file = File.mock() + >>> task.inmask_file = File.mock() + >>> task.warp_resolution = (6, 6, 6) + >>> task.in_fwhm = [8, 4, 2, 2] + >>> task.cmdline + 'None' + + + """ + + executable = "fnirt" + ref_file: File = shell.arg( + help="name of reference image", argstr="--ref={ref_file}" + ) + in_file: File = shell.arg(help="name of input image", argstr="--in={in_file}") + affine_file: File = shell.arg( + help="name of file containing affine transform", argstr="--aff={affine_file}" + ) + inwarp_file: File = shell.arg( + help="name of file containing 
initial non-linear warps", + argstr="--inwarp={inwarp_file}", + ) + in_intensitymap_file: list[File] = shell.arg( + help="name of file/files containing initial intensity mapping usually generated by previous fnirt run", + argstr="--intin={in_intensitymap_file}", + ) + fieldcoeff_file: ty.Any = shell.arg( + help="name of output file with field coefficients or true", + argstr="--cout={fieldcoeff_file}", + ) + field_file: ty.Any = shell.arg( + help="name of output file with field or true", argstr="--fout={field_file}" + ) + jacobian_file: ty.Any = shell.arg( + help="name of file for writing out the Jacobian of the field (for diagnostic or VBM purposes)", + argstr="--jout={jacobian_file}", + ) + modulatedref_file: ty.Any = shell.arg( + help="name of file for writing out intensity modulated --ref (for diagnostic purposes)", + argstr="--refout={modulatedref_file}", + ) + out_intensitymap_file: ty.Any = shell.arg( + help="name of files for writing information pertaining to intensity mapping", + formatter="out_intensitymap_file_formatter", + ) + config_file: ty.Any = shell.arg( + help="Name of config file specifying command line arguments", + argstr="--config={config_file}", + ) + refmask_file: File = shell.arg( + help="name of file with mask in reference space", + argstr="--refmask={refmask_file}", + ) + inmask_file: File = shell.arg( + help="name of file with mask in input image space", + argstr="--inmask={inmask_file}", + ) + skip_refmask: bool = shell.arg( + help="Skip specified refmask if set, default false", argstr="--applyrefmask=0" + ) + skip_inmask: bool = shell.arg( + help="skip specified inmask if set, default false", argstr="--applyinmask=0" + ) + apply_refmask: list[ty.Any] = shell.arg( + help="list of iterations to use reference mask on (1 to use, 0 to skip)", + argstr="--applyrefmask={apply_refmask}", + sep=",", + ) + apply_inmask: list[ty.Any] = shell.arg( + help="list of iterations to use input mask on (1 to use, 0 to skip)", + 
argstr="--applyinmask={apply_inmask}", + sep=",", + ) + skip_implicit_ref_masking: bool = shell.arg( + help="skip implicit masking based on value in --ref image. Default = 0", + argstr="--imprefm=0", + ) + skip_implicit_in_masking: bool = shell.arg( + help="skip implicit masking based on value in --in image. Default = 0", + argstr="--impinm=0", + ) + refmask_val: float = shell.arg( + help="Value to mask out in --ref image. Default =0.0", + argstr="--imprefval={refmask_val}", + ) + inmask_val: float = shell.arg( + help="Value to mask out in --in image. Default =0.0", + argstr="--impinval={inmask_val}", + ) + max_nonlin_iter: list[int] = shell.arg( + help="Max # of non-linear iterations list, default [5, 5, 5, 5]", + argstr="--miter={max_nonlin_iter}", + sep=",", + ) + subsampling_scheme: list[int] = shell.arg( + help="sub-sampling scheme, list, default [4, 2, 1, 1]", + argstr="--subsamp={subsampling_scheme}", + sep=",", + ) + warp_resolution: ty.Any = shell.arg( + help="(approximate) resolution (in mm) of warp basis in x-, y- and z-direction, default 10, 10, 10", + argstr="--warpres={warp_resolution[0]},{warp_resolution[1]},{warp_resolution[2]}", + ) + spline_order: int = shell.arg( + help="Order of spline, 2->Quadratic spline, 3->Cubic spline. 
Default=3", + argstr="--splineorder={spline_order}", + ) + in_fwhm: list[int] = shell.arg( + help="FWHM (in mm) of gaussian smoothing kernel for input volume, default [6, 4, 2, 2]", + argstr="--infwhm={in_fwhm}", + sep=",", + ) + ref_fwhm: list[int] = shell.arg( + help="FWHM (in mm) of gaussian smoothing kernel for ref volume, default [4, 2, 0, 0]", + argstr="--reffwhm={ref_fwhm}", + sep=",", + ) + regularization_model: ty.Any = shell.arg( + help="Model for regularisation of warp-field [membrane_energy bending_energy], default bending_energy", + argstr="--regmod={regularization_model}", + ) + regularization_lambda: list[float] = shell.arg( + help="Weight of regularisation, default depending on --ssqlambda and --regmod switches. See user documentation.", + argstr="--lambda={regularization_lambda}", + sep=",", + ) + skip_lambda_ssq: bool = shell.arg( + help="If true, lambda is not weighted by current ssq, default false", + argstr="--ssqlambda=0", + ) + jacobian_range: ty.Any = shell.arg( + help="Allowed range of Jacobian determinants, default 0.01, 100.0", + argstr="--jacrange={jacobian_range[0]},{jacobian_range[1]}", + ) + derive_from_ref: bool = shell.arg( + help="If true, ref image is used to calculate derivatives. 
Default false", + argstr="--refderiv", + ) + intensity_mapping_model: ty.Any = shell.arg( + help="Model for intensity-mapping", argstr="--intmod={intensity_mapping_model}" + ) + intensity_mapping_order: int = shell.arg( + help="Order of polynomial for mapping intensities, default 5", + argstr="--intorder={intensity_mapping_order}", + ) + biasfield_resolution: ty.Any = shell.arg( + help="Resolution (in mm) of bias-field modelling local intensities, default 50, 50, 50", + argstr="--biasres={biasfield_resolution[0]},{biasfield_resolution[1]},{biasfield_resolution[2]}", + ) + bias_regularization_lambda: float = shell.arg( + help="Weight of regularisation for bias-field, default 10000", + argstr="--biaslambda={bias_regularization_lambda}", + ) + skip_intensity_mapping: bool = shell.arg( + help="Skip estimate intensity-mapping default false", argstr="--estint=0" + ) + apply_intensity_mapping: list[ty.Any] = shell.arg( + help="List of subsampling levels to apply intensity mapping for (0 to skip, 1 to apply)", + argstr="--estint={apply_intensity_mapping}", + sep=",", + ) + hessian_precision: ty.Any = shell.arg( + help="Precision for representing Hessian, double or float. 
Default double", + argstr="--numprec={hessian_precision}", + ) + + class Outputs(shell.Outputs): + warped_file: Path = shell.outarg( + help="name of output image", + argstr="--iout={warped_file}", + path_template="warped_file", + ) + log_file: Path = shell.outarg( + help="Name of log-file", + argstr="--logout={log_file}", + path_template="log_file", + ) + fieldcoeff_file: File | None = shell.out( + help="file with field coefficients", callable=fieldcoeff_file_callable + ) + field_file: File | None = shell.out( + help="file with warp field", callable=field_file_callable + ) + jacobian_file: File | None = shell.out( + help="file containing Jacobian of the field", + callable=jacobian_file_callable, + ) + modulatedref_file: File | None = shell.out( + help="file containing intensity modulated --ref", + callable=modulatedref_file_callable, + ) + out_intensitymap_file: list[File] | None = shell.out( + help="files containing info pertaining to intensity mapping", + callable=out_intensitymap_file_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fnirt" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/fugue.py b/pydra/tasks/fsl/v6/preprocess/fugue.py new file mode 100644 index 0000000..20de165 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/fugue.py @@ -0,0 +1,311 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + self_dict = {} + + if skip is None: + skip = [] + + input_phase = inputs["phasemap_in_file"] is not attrs.NOTHING + input_vsm = inputs["shift_in_file"] is not attrs.NOTHING + input_fmap = inputs["fmap_in_file"] is not attrs.NOTHING + + if not input_phase and not input_vsm and not input_fmap: + raise RuntimeError( + "Either phasemap_in_file, shift_in_file or fmap_in_file must be set." 
+ ) + + if inputs["in_file"] is attrs.NOTHING: + skip += ["unwarped_file", "warped_file"] + else: + if inputs["forward_warping"]: + skip += ["unwarped_file"] + trait_spec = self_dict["inputs"].trait("warped_file") + trait_spec.name_template = "%s_warped" + trait_spec.name_source = "in_file" + trait_spec.output_name = "warped_file" + else: + skip += ["warped_file"] + trait_spec = self_dict["inputs"].trait("unwarped_file") + trait_spec.name_template = "%s_unwarped" + trait_spec.name_source = "in_file" + trait_spec.output_name = "unwarped_file" + + if inputs["shift_out_file"] is attrs.NOTHING: + vsm_save_masked = (inputs["save_shift"] is not attrs.NOTHING) and inputs[ + "save_shift" + ] + vsm_save_unmasked = ( + inputs["save_unmasked_shift"] is not attrs.NOTHING + ) and inputs["save_unmasked_shift"] + + if vsm_save_masked or vsm_save_unmasked: + trait_spec = self_dict["inputs"].trait("shift_out_file") + trait_spec.output_name = "shift_out_file" + + if input_fmap: + trait_spec.name_source = "fmap_in_file" + elif input_phase: + trait_spec.name_source = "phasemap_in_file" + elif input_vsm: + trait_spec.name_source = "shift_in_file" + else: + raise RuntimeError( + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." 
+ ) + + if vsm_save_unmasked: + trait_spec.name_template = "%s_vsm_unmasked" + else: + trait_spec.name_template = "%s_vsm" + else: + skip += ["save_shift", "save_unmasked_shift", "shift_out_file"] + + if inputs["fmap_out_file"] is attrs.NOTHING: + fmap_save_masked = (inputs["save_fmap"] is not attrs.NOTHING) and inputs[ + "save_fmap" + ] + fmap_save_unmasked = ( + inputs["save_unmasked_fmap"] is not attrs.NOTHING + ) and inputs["save_unmasked_fmap"] + + if fmap_save_masked or fmap_save_unmasked: + trait_spec = self_dict["inputs"].trait("fmap_out_file") + trait_spec.output_name = "fmap_out_file" + + if input_vsm: + trait_spec.name_source = "shift_in_file" + elif input_phase: + trait_spec.name_source = "phasemap_in_file" + elif input_fmap: + trait_spec.name_source = "fmap_in_file" + else: + raise RuntimeError( + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." + ) + + if fmap_save_unmasked: + trait_spec.name_template = "%s_fieldmap_unmasked" + else: + trait_spec.name_template = "%s_fieldmap" + else: + skip += ["save_fmap", "save_unmasked_fmap", "fmap_out_file"] + + return parsed_inputs + + +def unwarped_file_callable(output_dir, inputs, stdout, stderr): + parsed_inputs = _parse_inputs(inputs) + return parsed_inputs.get("unwarped_file", attrs.NOTHING) + + +def warped_file_callable(output_dir, inputs, stdout, stderr): + parsed_inputs = _parse_inputs(inputs) + return parsed_inputs.get("warped_file", attrs.NOTHING) + + +def shift_out_file_callable(output_dir, inputs, stdout, stderr): + parsed_inputs = _parse_inputs(inputs) + return parsed_inputs.get("shift_out_file", attrs.NOTHING) + + +def fmap_out_file_callable(output_dir, inputs, stdout, stderr): + parsed_inputs = _parse_inputs(inputs) + return parsed_inputs.get("fmap_out_file", attrs.NOTHING) + + +@shell.define( + xor=[ + ["save_fmap", "save_unmasked_fmap"], + ["save_shift", "save_unmasked_shift"], + ["unwarped_file", "warped_file"], + ] +) +class FUGUE(shell.Task["FUGUE.Outputs"]): + """ + 
Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.fugue import FUGUE + + >>> task = FUGUE() + >>> task.in_file = Nifti1.mock("epi.nii") + >>> task.shift_in_file = Nifti1.mock("vsm.nii")  # Previously computed with fugue as well + >>> task.phasemap_in_file = Nifti1.mock() + >>> task.fmap_in_file = File.mock() + >>> task.mask_file = Nifti1.mock() + >>> task.cmdline + 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --unwarp=epi_unwarped.nii.gz' + + + >>> task = FUGUE() + >>> task.in_file = Nifti1.mock("epi.nii") + >>> task.shift_in_file = Nifti1.mock() + >>> task.phasemap_in_file = Nifti1.mock() + >>> task.fmap_in_file = File.mock() + >>> task.unwarp_direction = "y" + >>> task.mask_file = Nifti1.mock("epi_mask.nii") + >>> task.cmdline + 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --warp=epi_warped.nii.gz' + + + >>> task = FUGUE() + >>> task.in_file = Nifti1.mock() + >>> task.shift_in_file = Nifti1.mock() + >>> task.phasemap_in_file = Nifti1.mock("epi_phasediff.nii") + >>> task.fmap_in_file = File.mock() + >>> task.dwell_to_asym_ratio = (0.77e-3 * 3) / 2.46e-3 + >>> task.mask_file = Nifti1.mock() + >>> task.save_shift = True + >>> task.cmdline + 'fugue --dwelltoasym=0.9390243902 --mask=epi_mask.nii --phasemap=epi_phasediff.nii --saveshift=epi_phasediff_vsm.nii.gz --unwarpdir=y' + + + """ + + executable = "fugue" + in_file: Nifti1 = shell.arg( + help="filename of input volume", argstr="--in={in_file}" + ) + shift_in_file: Nifti1 = shell.arg( + help="filename for reading pixel shift volume", + argstr="--loadshift={shift_in_file}", + ) + phasemap_in_file: Nifti1 = shell.arg( + help="filename for input phase image", argstr="--phasemap={phasemap_in_file}" + ) + fmap_in_file: File = shell.arg( + help="filename for loading fieldmap (rad/s)", argstr="--loadfmap={fmap_in_file}" + ) + 
unwarped_file: Path | None = shell.arg( + help="apply unwarping and save as filename", + argstr="--unwarp={unwarped_file}", + requires=["in_file"], + ) + warped_file: Path | None = shell.arg( + help="apply forward warping and save as filename", + argstr="--warp={warped_file}", + requires=["in_file"], + ) + forward_warping: bool = shell.arg( + help="apply forward warping instead of unwarping", default=False + ) + dwell_to_asym_ratio: float = shell.arg( + help="set the dwell to asym time ratio", + argstr="--dwelltoasym={dwell_to_asym_ratio:.10}", + ) + dwell_time: float = shell.arg( + help="set the EPI dwell time per phase-encode line - same as echo spacing - (sec)", + argstr="--dwell={dwell_time:.10}", + ) + asym_se_time: float = shell.arg( + help="set the fieldmap asymmetric spin echo time (sec)", + argstr="--asym={asym_se_time:.10}", + ) + median_2dfilter: bool = shell.arg( + help="apply 2D median filtering", argstr="--median" + ) + despike_2dfilter: bool = shell.arg( + help="apply a 2D de-spiking filter", argstr="--despike" + ) + no_gap_fill: bool = shell.arg( + help="do not apply gap-filling measure to the fieldmap", argstr="--nofill" + ) + no_extend: bool = shell.arg( + help="do not apply rigid-body extrapolation to the fieldmap", + argstr="--noextend", + ) + smooth2d: float = shell.arg( + help="apply 2D Gaussian smoothing of sigma N (in mm)", + argstr="--smooth2={smooth2d:.2}", + ) + smooth3d: float = shell.arg( + help="apply 3D Gaussian smoothing of sigma N (in mm)", + argstr="--smooth3={smooth3d:.2}", + ) + poly_order: int = shell.arg( + help="apply polynomial fitting of order N", argstr="--poly={poly_order}" + ) + fourier_order: int = shell.arg( + help="apply Fourier (sinusoidal) fitting of order N", + argstr="--fourier={fourier_order}", + ) + pava: bool = shell.arg(help="apply monotonic enforcement via PAVA", argstr="--pava") + despike_threshold: float = shell.arg( + help="specify the threshold for de-spiking (default=3.0)", + 
argstr="--despikethreshold={despike_threshold}", + ) + unwarp_direction: ty.Any = shell.arg( + help="specifies direction of warping (default y)", + argstr="--unwarpdir={unwarp_direction}", + ) + phase_conjugate: bool = shell.arg( + help="apply phase conjugate method of unwarping", argstr="--phaseconj" + ) + icorr: bool = shell.arg( + help="apply intensity correction to unwarping (pixel shift method only)", + argstr="--icorr", + requires=["shift_in_file"], + ) + icorr_only: bool = shell.arg( + help="apply intensity correction only", + argstr="--icorronly", + requires=["unwarped_file"], + ) + mask_file: Nifti1 = shell.arg( + help="filename for loading valid mask", argstr="--mask={mask_file}" + ) + nokspace: bool = shell.arg( + help="do not use k-space forward warping", argstr="--nokspace" + ) + save_shift: bool = shell.arg(help="write pixel shift volume") + shift_out_file: Path = shell.arg( + help="filename for saving pixel shift volume", + argstr="--saveshift={shift_out_file}", + ) + save_unmasked_shift: bool = shell.arg( + help="saves the unmasked shiftmap when using --saveshift", + argstr="--unmaskshift", + ) + save_fmap: bool = shell.arg(help="write field map volume") + fmap_out_file: Path = shell.arg( + help="filename for saving fieldmap (rad/s)", argstr="--savefmap={fmap_out_file}" + ) + save_unmasked_fmap: bool = shell.arg( + help="saves the unmasked fieldmap when using --savefmap", argstr="--unmaskfmap" + ) + + class Outputs(shell.Outputs): + unwarped_file: File | None = shell.out( + help="unwarped file", callable=unwarped_file_callable + ) + warped_file: File | None = shell.out( + help="forward warped file", callable=warped_file_callable + ) + shift_out_file: File | None = shell.out( + help="voxel shift map file", callable=shift_out_file_callable + ) + fmap_out_file: File | None = shell.out( + help="fieldmap file", callable=fmap_out_file_callable + ) diff --git a/pydra/tasks/fsl/v6/preprocess/mcflirt.py b/pydra/tasks/fsl/v6/preprocess/mcflirt.py new file 
mode 100644 index 0000000..b8c16b9 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/mcflirt.py @@ -0,0 +1,356 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from looseversion import LooseVersion +from nibabel import load +from pydra.tasks.fsl.v6.base import Info +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "interpolation": + if value == "trilinear": + return "" + else: + return argstr.format(**{name: value}) + + return argstr.format(**inputs) + + +def interpolation_formatter(field, inputs): + return _format_arg("interpolation", field, inputs, argstr="-{interpolation}_final") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + + outputs["out_file"] = _gen_outfilename( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + output_dir = os.path.dirname(outputs["out_file"]) + + if (inputs["stats_imgs"] is not attrs.NOTHING) and inputs["stats_imgs"]: + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): + + outputs["variance_img"] = _gen_fname( + outputs["out_file"] + "_variance.ext", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["std_img"] = _gen_fname( + outputs["out_file"] + "_sigma.ext", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + 
stdout=inputs["stdout"], + ) + else: + outputs["variance_img"] = _gen_fname( + outputs["out_file"], + suffix="_variance", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["std_img"] = _gen_fname( + outputs["out_file"], + suffix="_sigma", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + + if (inputs["mean_vol"] is not attrs.NOTHING) and inputs["mean_vol"]: + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): + + outputs["mean_img"] = _gen_fname( + outputs["out_file"] + "_mean_reg.ext", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + else: + outputs["mean_img"] = _gen_fname( + outputs["out_file"], + suffix="_mean_reg", + cwd=output_dir, + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + + if (inputs["save_mats"] is not attrs.NOTHING) and inputs["save_mats"]: + _, filename = os.path.split(outputs["out_file"]) + matpathname = os.path.join(output_dir, filename + ".mat") + _, _, _, timepoints = load(inputs["in_file"]).shape + outputs["mat_file"] = [] + for t in range(timepoints): + outputs["mat_file"].append(os.path.join(matpathname, "MAT_%04d" % t)) + if (inputs["save_plots"] is not attrs.NOTHING) and inputs["save_plots"]: + + outputs["par_file"] = outputs["out_file"] + ".par" + if (inputs["save_rms"] is not attrs.NOTHING) and inputs["save_rms"]: + outfile = outputs["out_file"] + outputs["rms_files"] = [outfile + "_abs.rms", outfile + "_rel.rms"] + return outputs + + +def variance_img_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + 
output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("variance_img") + + +def std_img_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("std_img") + + +def mean_img_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mean_img") + + +def par_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("par_file") + + +def mat_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("mat_file") + + +def rms_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("rms_files") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _gen_outfilename( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + ) + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class MCFLIRT(shell.Task["MCFLIRT.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.mcflirt import MCFLIRT + + >>> task = MCFLIRT() + >>> task.in_file = Nifti1.mock("functional.nii") + >>> task.out_file = "moco.nii" + >>> task.init = File.mock() + >>> task.ref_file = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "mcflirt" + in_file: Nifti1 = shell.arg( + help="timeseries to motion-correct", argstr="-in {in_file}", 
position=1 + ) + cost: ty.Any = shell.arg(help="cost function to optimize", argstr="-cost {cost}") + bins: int = shell.arg(help="number of histogram bins", argstr="-bins {bins}") + dof: int = shell.arg( + help="degrees of freedom for the transformation", argstr="-dof {dof}" + ) + ref_vol: int = shell.arg( + help="volume to align frames to", argstr="-refvol {ref_vol}" + ) + scaling: float = shell.arg( + help="scaling factor to use", argstr="-scaling {scaling:.2}" + ) + smooth: float = shell.arg( + help="smoothing factor for the cost function", argstr="-smooth {smooth:.2}" + ) + rotation: int = shell.arg( + help="scaling factor for rotation tolerances", argstr="-rotation {rotation}" + ) + stages: int = shell.arg( + help="stages (if 4, perform final search with sinc interpolation", + argstr="-stages {stages}", + ) + init: File = shell.arg(help="initial transformation matrix", argstr="-init {init}") + interpolation: ty.Any = shell.arg( + help="interpolation method for transformation", + formatter="interpolation_formatter", + ) + use_gradient: bool = shell.arg(help="run search on gradient images", argstr="-gdt") + use_contour: bool = shell.arg(help="run search on contour images", argstr="-edge") + mean_vol: bool = shell.arg(help="register to mean volume", argstr="-meanvol") + stats_imgs: bool = shell.arg( + help="produce variance and std. dev. 
images", argstr="-stats" + ) + save_mats: bool = shell.arg(help="save transformation matrices", argstr="-mats") + save_plots: bool = shell.arg(help="save transformation parameters", argstr="-plots") + save_rms: bool = shell.arg( + help="save rms displacement parameters", argstr="-rmsabs -rmsrel" + ) + ref_file: File = shell.arg( + help="target image for motion correction", argstr="-reffile {ref_file}" + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="file to write", argstr="-out {out_file}", path_template='"moco.nii"' + ) + variance_img: File | None = shell.out( + help="variance image", callable=variance_img_callable + ) + std_img: File | None = shell.out( + help="standard deviation image", callable=std_img_callable + ) + mean_img: File | None = shell.out( + help="mean timeseries image (if mean_vol=True)", callable=mean_img_callable + ) + par_file: File | None = shell.out( + help="text-file with motion parameters", callable=par_file_callable + ) + mat_file: list[File] | None = shell.out( + help="transformation matrices", callable=mat_file_callable + ) + rms_files: list[File] | None = shell.out( + help="absolute and relative displacement parameters", + callable=rms_files_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. 
+ (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "mcflirt" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _gen_outfilename( + in_file=None, + out_file=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + out_file = out_file + if out_file is not attrs.NOTHING: + out_file = os.path.realpath(out_file) + if (out_file is attrs.NOTHING) and (in_file is not attrs.NOTHING): + out_file = _gen_fname(in_file, suffix="_mcf", output_type=output_type) + return os.path.abspath(out_file) + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/prelude.py b/pydra/tasks/fsl/v6/preprocess/prelude.py new file mode 100644 index 0000000..2f4dce7 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/prelude.py @@ -0,0 +1,173 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "unwrapped_phase_file": + return _list_outputs( + complex_phase_file=inputs["complex_phase_file"], + output_type=inputs["output_type"], + phase_file=inputs["phase_file"], + unwrapped_phase_file=inputs["unwrapped_phase_file"], + )["unwrapped_phase_file"] + return None + + +def unwrapped_phase_file_default(inputs): + return _gen_filename("unwrapped_phase_file", inputs=inputs) + + +@shell.define( + xor=[ + 
["complex_phase_file", "magnitude_file"], + ["complex_phase_file", "magnitude_file", "phase_file"], + ["complex_phase_file", "phase_file"], + ["labelprocess2d", "process2d"], + ["labelprocess2d", "process2d", "process3d"], + ] +) +class PRELUDE(shell.Task["PRELUDE.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.prelude import PRELUDE + + """ + + executable = "prelude" + complex_phase_file: File | None = shell.arg( + help="complex phase input volume", argstr="--complex={complex_phase_file}" + ) + magnitude_file: File | None = shell.arg( + help="file containing magnitude image", argstr="--abs={magnitude_file}" + ) + phase_file: File | None = shell.arg( + help="raw phase file", argstr="--phase={phase_file}" + ) + num_partitions: int = shell.arg( + help="number of phase partitions to use", + argstr="--numphasesplit={num_partitions}", + ) + labelprocess2d: bool = shell.arg( + help="does label processing in 2D (slice at a time)", argstr="--labelslices" + ) + process2d: bool = shell.arg( + help="does all processing in 2D (slice at a time)", argstr="--slices" + ) + process3d: bool = shell.arg( + help="forces all processing to be full 3D", argstr="--force3D" + ) + threshold: float = shell.arg( + help="intensity threshold for masking", argstr="--thresh={threshold:.10}" + ) + mask_file: File = shell.arg( + help="filename of mask input volume", argstr="--mask={mask_file}" + ) + start: int = shell.arg( + help="first image number to process (default 0)", argstr="--start={start}" + ) + end: int = shell.arg( + help="final image number to process (default Inf)", argstr="--end={end}" + ) + savemask_file: File = shell.arg( + help="saving the mask volume", argstr="--savemask={savemask_file}" + ) + rawphase_file: File = shell.arg( + help="saving the raw phase output", argstr="--rawphase={rawphase_file}" + ) + label_file: File = shell.arg( + help="saving the area labels output", 
argstr="--labels={label_file}" + ) + removeramps: bool = shell.arg( + help="remove phase ramps during unwrapping", argstr="--removeramps" + ) + + class Outputs(shell.Outputs): + unwrapped_phase_file: Path = shell.outarg( + help="file containing unwrapped phase", + argstr="--unwrap={unwrapped_phase_file}", + path_template="unwrapped_phase_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "prelude" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs( + complex_phase_file=None, + output_type=None, + phase_file=None, + unwrapped_phase_file=None, +): + outputs = {} + out_file = unwrapped_phase_file + if out_file is attrs.NOTHING: + if phase_file is not attrs.NOTHING: + out_file = _gen_fname( + phase_file, suffix="_unwrapped", output_type=output_type + ) + elif complex_phase_file is not attrs.NOTHING: + out_file = _gen_fname( + complex_phase_file, suffix="_phase_unwrapped", output_type=output_type + ) + outputs["unwrapped_phase_file"] = os.path.abspath(out_file) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/slice_timer.py b/pydra/tasks/fsl/v6/preprocess/slice_timer.py new file mode 100644 index 0000000..ca6ab67 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/slice_timer.py @@ -0,0 +1,161 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + out_file = inputs["out_file"] + if out_file is attrs.NOTHING: + out_file = _gen_fname( + inputs["in_file"], + suffix="_st", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["slice_time_corrected_file"] = os.path.abspath(out_file) + return outputs + + +def 
slice_time_corrected_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("slice_time_corrected_file") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["slice_time_corrected_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class SliceTimer(shell.Task["SliceTimer.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.slice_timer import SliceTimer + + """ + + executable = "slicetimer" + in_file: File = shell.arg( + help="filename of input timeseries", argstr="--in={in_file}", position=1 + ) + out_file: Path = shell.arg( + help="filename of output timeseries", argstr="--out={out_file}" + ) + index_dir: bool = shell.arg( + help="slice indexing from top to bottom", argstr="--down" + ) + time_repetition: float = shell.arg( + help="Specify TR of data - default is 3s", argstr="--repeat={time_repetition}" + ) + slice_direction: ty.Any = shell.arg( + help="direction of slice acquisition (x=1, y=2, z=3) - default is z", + argstr="--direction={slice_direction}", + ) + interleaved: bool = shell.arg(help="use interleaved acquisition", argstr="--odd") + custom_timings: File = shell.arg( + help="slice timings, in fractions of TR, range 0:1 (default is 0.5 = no shift)", + argstr="--tcustom={custom_timings}", + ) + global_shift: float = shell.arg( + help="shift in fraction of TR, range 0:1 (default is 0.5 = no shift)", + argstr="--tglobal", + ) + custom_order: File = shell.arg( + help="filename of single-column custom interleave order file (first slice is referred to as 1 not 0)", + argstr="--ocustom={custom_order}", + ) + + class Outputs(shell.Outputs): 
+ slice_time_corrected_file: File | None = shell.out( + help="slice time corrected file", + callable=slice_time_corrected_file_callable, + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "slicetimer" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/susan.py b/pydra/tasks/fsl/v6/preprocess/susan.py new file mode 100644 index 0000000..4e7cccc --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/susan.py @@ -0,0 +1,191 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import numpy as np +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "fwhm": + return argstr % (float(value) / np.sqrt(8 * np.log(2))) + if name == "usans": + if not value: + return "0" + arglist = [str(len(value))] + for filename, thresh in value: + arglist.extend([filename, "%.10f" % thresh]) + return " ".join(arglist) + + return argstr.format(**inputs) + + +def fwhm_formatter(field, inputs): + return _format_arg("fwhm", field, inputs, argstr="{fwhm:.10}") + + +def usans_formatter(field, inputs): + return _format_arg("usans", field, inputs, argstr="") + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + out_file = inputs["out_file"] + if out_file is attrs.NOTHING: + out_file = _gen_fname( + inputs["in_file"], + suffix="_smooth", + output_type=inputs["output_type"], + inputs=inputs["inputs"], + output_dir=inputs["output_dir"], + stderr=inputs["stderr"], + stdout=inputs["stdout"], + ) + outputs["smoothed_file"] = os.path.abspath(out_file) + return 
outputs + + +def smoothed_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("smoothed_file") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["smoothed_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class SUSAN(shell.Task["SUSAN.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.preprocess.susan import SUSAN + + """ + + executable = "susan" + in_file: File = shell.arg( + help="filename of input timeseries", argstr="{in_file}", position=1 + ) + brightness_threshold: float = shell.arg( + help="brightness threshold and should be greater than noise level and less than contrast of edges to be preserved.", + argstr="{brightness_threshold:.10}", + position=2, + ) + fwhm: float = shell.arg( + help="fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))", + position=3, + formatter="fwhm_formatter", + ) + dimension: ty.Any = shell.arg( + help="within-plane (2) or fully 3D (3)", + argstr="{dimension}", + position=4, + default=3, + ) + use_median: ty.Any = shell.arg( + help="whether to use a local median filter in the cases where single-point noise is detected", + argstr="{use_median}", + position=5, + default=1, + ) + usans: list[ty.Any] = shell.arg( + help="determines whether the smoothing area (USAN) is to be found from secondary images (0, 1 or 2). 
A negative value for any brightness threshold will auto-set the threshold at 10% of the robust range", + position=6, + formatter="usans_formatter", + default=[], + ) + out_file: Path = shell.arg( + help="output file name", argstr="{out_file}", position=-1 + ) + + class Outputs(shell.Outputs): + smoothed_file: File | None = shell.out( + help="smoothed output file", callable=smoothed_file_callable + ) + + +def _gen_fname( + basename, + cwd=None, + suffix=None, + change_ext=True, + ext=None, + output_type=None, + inputs=None, + output_dir=None, + stderr=None, + stdout=None, +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "susan" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/preprocess/tests/conftest.py b/pydra/tasks/fsl/v6/preprocess/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/preprocess/tests/test_applywarp.py b/pydra/tasks/fsl/v6/preprocess/tests/test_applywarp.py new file mode 100644 index 0000000..7809948 --- /dev/null +++ b/pydra/tasks/fsl/v6/preprocess/tests/test_applywarp.py @@ -0,0 +1,22 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.preprocess.apply_warp import ApplyWarp +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_applywarp_1(): + task = ApplyWarp() + task.in_file = File.sample(seed=0) + 
@pytest.mark.xfail
def test_applyxfm_1():
    """Smoke-test ApplyXFM: build the task from sampled files and run it."""
    task = ApplyXFM()
    task.apply_xfm = True
    # File-typed inputs, in declaration order, with the sample seed for each.
    file_fields = [
        ("in_file", 1),
        ("reference", 2),
        ("in_matrix_file", 6),
        ("schedule", 29),
        ("ref_weight", 30),
        ("in_weight", 31),
        ("wm_seg", 38),
        ("wmcoords", 39),
        ("wmnorms", 40),
        ("fieldmap", 41),
        ("fieldmapmask", 42),
    ]
    for field, seed in file_fields:
        setattr(task, field, File.sample(seed=seed))
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
@pytest.mark.xfail
def test_fast_1():
    """Smoke-test FAST with a sampled NIfTI input plus auxiliary files."""
    task = FAST()
    task.in_files = [Nifti1.sample(seed=0)]
    # The remaining file inputs are independent assignments.
    task.other_priors = [File.sample(seed=11)]
    task.init_transform = File.sample(seed=10)
    task.manual_seg = File.sample(seed=20)
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
@pytest.mark.xfail
def test_flirt_1():
    """Smoke-test FLIRT with a sampled NIfTI input and every file field set."""
    task = FLIRT()
    task.in_file = Nifti1.sample(seed=0)
    # Generic file inputs, in declaration order, with their sample seeds.
    seeds = {
        "reference": 1,
        "in_matrix_file": 5,
        "schedule": 29,
        "ref_weight": 30,
        "in_weight": 31,
        "wm_seg": 38,
        "wmcoords": 39,
        "wmnorms": 40,
        "fieldmap": 41,
        "fieldmapmask": 42,
    }
    for name, seed in seeds.items():
        setattr(task, name, File.sample(seed=seed))
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
@pytest.mark.xfail
def test_fnirt_2():
    """Smoke-test FNIRT with explicit warp resolution and smoothing schedule."""
    task = FNIRT()
    resolution = (6, 6, 6)
    fwhm_schedule = [8, 4, 2, 2]
    task.warp_resolution = resolution
    task.in_fwhm = fwhm_schedule
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
@pytest.mark.xfail
def test_fugue_4():
    """Smoke-test FUGUE shift-map computation from a phase map."""
    task = FUGUE()
    task.phasemap_in_file = Nifti1.sample(seed=2)
    # Dwell-time to asymmetry-time ratio; the exact source expression is kept
    # so the float value is bit-identical.
    ratio = (0.77e-3 * 3) / 2.46e-3
    task.dwell_to_asym_ratio = ratio
    task.save_shift = True
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
@pytest.mark.xfail
def test_slicetimer_1():
    """Smoke-test SliceTimer with sampled input/timing/order files."""
    task = SliceTimer()
    for field, seed in (("in_file", 0), ("custom_timings", 6), ("custom_order", 8)):
        setattr(task, field, File.sample(seed=seed))
    print(f"CMDLINE: {task.cmdline}\n\n")
    res = task(worker=PassAfterTimeoutWorker)
    print("RESULT: ", res)
def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
    """Collect the parsed avscale outputs consumed by the callables below.

    NOTE(review): ``parsed_inputs`` is never defined anywhere in this module,
    so calling this function raises NameError. Presumably the nipype2pydra
    converter intended to populate a ``parsed_inputs["_results"]`` dict from
    the command's stdout — TODO confirm and restore that parsing step.
    """
    # Normalise the task inputs to a plain dict (value is currently unused).
    inputs = attrs.asdict(inputs)

    return parsed_inputs["_results"]
def scales_callable(output_dir, inputs, stdout, stderr):
    """Output callable: fetch the 'scales' (x, y, z) entry from _list_outputs."""
    kwargs = dict(output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr)
    return _list_outputs(**kwargs).get("scales")
@shell.define
class AvScale(shell.Task["AvScale.Outputs"]):
    """Pydra shell task wrapping FSL's ``avscale`` affine-decomposition tool.

    Examples
    -------

    >>> from fileformats.generic import File
    >>> from pydra.tasks.fsl.v6.utils.av_scale import AvScale

    """

    executable = "avscale"
    # Ask avscale to report all parameters (--allparams).
    all_param: bool = shell.arg(help="", argstr="--allparams")
    # Positional arguments: matrix file second-to-last, reference file last.
    mat_file: File = shell.arg(
        help="mat file to read", argstr="{mat_file}", position=-2
    )
    ref_file: File = shell.arg(
        help="reference file to get center of rotation",
        argstr="{ref_file}",
        position=-1,
    )

    class Outputs(shell.Outputs):
        # NOTE(review): every callable below routes through this module's
        # `_list_outputs`, which references an undefined name — confirm the
        # stdout-parsing step before relying on these outputs.
        rotation_translation_matrix: list[list[float]] | None = shell.out(
            help="Rotation and Translation Matrix",
            callable=rotation_translation_matrix_callable,
        )
        scales: list[float] | None = shell.out(
            help="Scales (x,y,z)", callable=scales_callable
        )
        skews: list[float] | None = shell.out(help="Skews", callable=skews_callable)
        average_scaling: float | None = shell.out(
            help="Average Scaling", callable=average_scaling_callable
        )
        determinant: float | None = shell.out(
            help="Determinant", callable=determinant_callable
        )
        forward_half_transform: list[list[float]] | None = shell.out(
            help="Forward Half Transform", callable=forward_half_transform_callable
        )
        backward_half_transform: list[list[float]] | None = shell.out(
            help="Backwards Half Transform", callable=backward_half_transform_callable
        )
        left_right_orientation_preserved: bool | None = shell.out(
            help="True if LR orientation preserved",
            callable=left_right_orientation_preserved_callable,
        )
        rot_angles: list[float] | None = shell.out(
            help="rotation angles", callable=rot_angles_callable
        )
        translations: list[float] | None = shell.out(
            help="translations", callable=translations_callable
        )
0000000..be52321 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/complex.py @@ -0,0 +1,278 @@ +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + + if skip is None: + skip = [] + if inputs["real_cartesian"]: + skip += inputs["_ofs"][:3] + elif inputs["real_polar"]: + skip += inputs["_ofs"][:1] + inputs["_ofs"][3:] + else: + skip += inputs["_ofs"][1:] + + return parsed_inputs + + +def _gen_filename(name, inputs): + parsed_inputs = _parse_inputs(inputs) if inputs else {} + if name == "complex_out_file": + if inputs["complex_cartesian"]: + in_file = inputs["real_in_file"] + elif inputs["complex_polar"]: + in_file = inputs["magnitude_in_file"] + elif inputs["complex_split"] or inputs["complex_merge"]: + in_file = inputs["complex_in_file"] + else: + return None + return _gen_fname(in_file, suffix="_cplx", output_type=inputs["output_type"]) + elif name == "magnitude_out_file": + return _gen_fname( + inputs["complex_in_file"], suffix="_mag", output_type=inputs["output_type"] + ) + elif name == "phase_out_file": + return _gen_fname( + inputs["complex_in_file"], + suffix="_phase", + output_type=inputs["output_type"], + ) + elif name == "real_out_file": + return _gen_fname( + inputs["complex_in_file"], suffix="_real", output_type=inputs["output_type"] + ) + elif name == "imaginary_out_file": + return _gen_fname( + inputs["complex_in_file"], suffix="_imag", output_type=inputs["output_type"] + ) + return None + + +def complex_out_file_default(inputs): + return _gen_filename("complex_out_file", inputs=inputs) + + +def imaginary_out_file_default(inputs): + return _gen_filename("imaginary_out_file", inputs=inputs) + + +def 
def real_out_file_default(inputs):
    """Default value for the ``real_out_file`` output, derived from inputs."""
    field_name = "real_out_file"
    return _gen_filename(field_name, inputs=inputs)
+ complex_in_file2: File | None = shell.arg( + help="", argstr="{complex_in_file2}", position=3 + ) + real_in_file: File | None = shell.arg(help="", argstr="{real_in_file}", position=2) + imaginary_in_file: File | None = shell.arg( + help="", argstr="{imaginary_in_file}", position=3 + ) + magnitude_in_file: File | None = shell.arg( + help="", argstr="{magnitude_in_file}", position=2 + ) + phase_in_file: File | None = shell.arg( + help="", argstr="{phase_in_file}", position=3 + ) + start_vol: int | None = shell.arg(help="", argstr="{start_vol}", position=-2) + end_vol: int | None = shell.arg(help="", argstr="{end_vol}", position=-1) + real_polar: bool = shell.arg(help="", argstr="-realpolar", position=1) + real_cartesian: bool = shell.arg(help="", argstr="-realcartesian", position=1) + complex_cartesian: bool = shell.arg(help="", argstr="-complex", position=1) + complex_polar: bool = shell.arg(help="", argstr="-complexpolar", position=1) + complex_split: bool = shell.arg(help="", argstr="-complexsplit", position=1) + complex_merge: bool = shell.arg(help="", argstr="-complexmerge", position=1) + + class Outputs(shell.Outputs): + complex_out_file: Path | None = shell.outarg( + help="", + argstr="{complex_out_file}", + position=-3, + path_template="complex_out_file", + ) + magnitude_out_file: Path | None = shell.outarg( + help="", + argstr="{magnitude_out_file}", + position=-4, + path_template="magnitude_out_file", + ) + phase_out_file: Path | None = shell.outarg( + help="", + argstr="{phase_out_file}", + position=-3, + path_template="phase_out_file", + ) + real_out_file: Path | None = shell.outarg( + help="", + argstr="{real_out_file}", + position=-4, + path_template="real_out_file", + ) + imaginary_out_file: Path | None = shell.outarg( + help="", + argstr="{imaginary_out_file}", + position=-3, + path_template="imaginary_out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based 
on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslcomplex" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/convert_warp.py b/pydra/tasks/fsl/v6/utils/convert_warp.py new file mode 100644 index 0000000..93cf003 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/convert_warp.py @@ -0,0 +1,108 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +@shell.define(xor=[["abswarp", "relwarp"], ["out_abswarp", "out_relwarp"]]) +class ConvertWarp(shell.Task["ConvertWarp.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.convert_warp import ConvertWarp + + >>> task = ConvertWarp() + >>> task.reference = File.mock() + >>> task.premat = 
File.mock() + >>> task.warp1 = Nifti1.mock("warpfield.nii") + >>> task.midmat = File.mock() + >>> task.warp2 = File.mock() + >>> task.postmat = File.mock() + >>> task.shift_in_file = File.mock() + >>> task.relwarp = True + >>> task.cmdline + 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' + + + """ + + executable = "convertwarp" + reference: File = shell.arg( + help="Name of a file in target space of the full transform.", + argstr="--ref={reference}", + position=1, + ) + premat: File = shell.arg( + help="filename for pre-transform (affine matrix)", argstr="--premat={premat}" + ) + warp1: Nifti1 = shell.arg( + help="Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a fnirt-transform from a subjects structural scan to an average of a group of subjects.", + argstr="--warp1={warp1}", + ) + midmat: File = shell.arg( + help="Name of file containing mid-warp-affine transform", + argstr="--midmat={midmat}", + ) + warp2: File = shell.arg( + help="Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a fnirt-transform from the average of a group of subjects to some standard space (e.g. MNI152).", + argstr="--warp2={warp2}", + ) + postmat: File = shell.arg( + help="Name of file containing an affine transform (applied last). It could e.g. be an affine transform that maps the MNI152-space into a better approximation to the Talairach-space (if indeed there is one).", + argstr="--postmat={postmat}", + ) + shift_in_file: File = shell.arg( + help='Name of file containing a "shiftmap", a non-linear transform with displacements only in one direction (applied first, before premat). This would typically be a fieldmap that has been pre-processed using fugue that maps a subjects functional (EPI) data onto an undistorted space (i.e. 
a space that corresponds to his/her true anatomy).', + argstr="--shiftmap={shift_in_file}", + ) + shift_direction: ty.Any = shell.arg( + help="Indicates the direction that the distortions from --shiftmap goes. It depends on the direction and polarity of the phase-encoding in the EPI sequence.", + argstr="--shiftdir={shift_direction}", + requires=["shift_in_file"], + ) + cons_jacobian: bool = shell.arg( + help="Constrain the Jacobian of the warpfield to lie within specified min/max limits.", + argstr="--constrainj", + ) + jacobian_min: float = shell.arg( + help="Minimum acceptable Jacobian value for constraint (default 0.01)", + argstr="--jmin={jacobian_min}", + ) + jacobian_max: float = shell.arg( + help="Maximum acceptable Jacobian value for constraint (default 100.0)", + argstr="--jmax={jacobian_max}", + ) + abswarp: bool = shell.arg( + help="If set it indicates that the warps in --warp1 and --warp2 should be interpreted as absolute. I.e. the values in --warp1/2 are the coordinates in the next space, rather than displacements. This flag is ignored if --warp1/2 was created by fnirt, which always creates relative displacements.", + argstr="--abs", + ) + relwarp: bool = shell.arg( + help="If set it indicates that the warps in --warp1/2 should be interpreted as relative. I.e. the values in --warp1/2 are displacements from the coordinates in the next space.", + argstr="--rel", + ) + out_abswarp: bool = shell.arg( + help="If set it indicates that the warps in --out should be absolute, i.e. the values in --out are displacements from the coordinates in --ref.", + argstr="--absout", + ) + out_relwarp: bool = shell.arg( + help="If set it indicates that the warps in --out should be relative, i.e. the values in --out are displacements from the coordinates in --ref.", + argstr="--relout", + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="Name of output file, containing warps that are the combination of all those given as arguments. 
def _gen_filename(name, inputs):
    """Generate a default filename; only the 'out_file' field is supported."""
    if name != "out_file":
        return None
    keys = ("concat_xfm", "in_file", "in_file2", "invert_xfm", "out_file")
    outputs = _list_outputs(**{key: inputs[key] for key in keys})
    return outputs["out_file"]
def _list_outputs(
    concat_xfm=None,
    in_file=None,
    in_file2=None,
    invert_xfm=None,
    out_file=None,
    output_dir=None,
):
    """Build the output dictionary for ConvertXFM.

    Parameters
    ----------
    concat_xfm, invert_xfm : bool
        Which convert_xfm mode is active; selects the derived filename suffix.
    in_file, in_file2 : path-like
        Input matrices used to derive a default output name.
    out_file : path-like or attrs.NOTHING
        Explicit output filename; when unset, one is generated.
    output_dir : str, optional
        Directory for the generated filename (defaults to the current
        working directory).

    Returns
    -------
    dict
        {"out_file": absolute path of the output matrix}.
    """
    if output_dir is None:
        # BUG FIX: `output_dir` was referenced below without ever being
        # defined, raising NameError whenever `out_file` was unset. It is now
        # an optional parameter defaulting to the current working directory.
        output_dir = os.getcwd()
    outputs = {}
    outfile = out_file
    if outfile is attrs.NOTHING:
        _, infile1, _ = split_filename(in_file)
        if invert_xfm:
            outfile = fname_presuffix(
                infile1, suffix="_inv.mat", newpath=output_dir, use_ext=False
            )
        elif concat_xfm:
            _, infile2, _ = split_filename(in_file2)
            outfile = fname_presuffix(
                f"{infile1}_{infile2}",
                suffix=".mat",
                newpath=output_dir,
                use_ext=False,
            )
        else:
            outfile = fname_presuffix(
                infile1, suffix="_fix.mat", newpath=output_dir, use_ext=False
            )
    outputs["out_file"] = os.path.abspath(outfile)
    return outputs
pydra.tasks.fsl.v6.utils.copy_geom import CopyGeom + + """ + + executable = "fslcpgeom" + in_file: File = shell.arg(help="source image", argstr="{in_file}", position=1) + dest_file: Path = shell.arg( + help="destination image", + argstr="{dest_file}", + position=2, + copy_mode="File.CopyMode.copy", + ) + ignore_dims: bool = shell.arg( + help="Do not copy image dimensions", argstr="-d", position=-1 + ) + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="image with new geometry header", callable=out_file_callable + ) diff --git a/pydra/tasks/fsl/v6/utils/extract_roi.py b/pydra/tasks/fsl/v6/utils/extract_roi.py new file mode 100644 index 0000000..29b80e5 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/extract_roi.py @@ -0,0 +1,177 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "crop_list": + return " ".join(str(x) for sublist in value for x in sublist) + + return argstr.format(**inputs) + + +def crop_list_formatter(field, inputs): + return _format_arg("crop_list", field, inputs, argstr="{crop_list}") + + +def _gen_filename(name, inputs): + if name == "roi_file": + return _list_outputs( + in_file=inputs["in_file"], + output_type=inputs["output_type"], + roi_file=inputs["roi_file"], + )[name] + return None + + +def roi_file_default(inputs): + return _gen_filename("roi_file", inputs=inputs) + + +@shell.define( + xor=[ + [ + "crop_list", + "t_min", + "t_size", + "x_min", + "x_size", + "y_min", + "y_size", + "z_min", + "z_size", + ] + ] +) +class ExtractROI(shell.Task["ExtractROI.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> 
from pydra.tasks.fsl.v6.utils.extract_roi import ExtractROI + + >>> task = ExtractROI() + >>> task.in_file = File.mock(anatfile) + >>> task.roi_file = "bar.nii" + >>> task.t_min = 0 + >>> task.t_size = 1 + >>> task.cmdline + 'None' + + + """ + + executable = "fslroi" + in_file: File = shell.arg(help="input file", argstr="{in_file}", position=1) + x_min: int | None = shell.arg(help="", argstr="{x_min}", position=3) + x_size: int | None = shell.arg(help="", argstr="{x_size}", position=4) + y_min: int | None = shell.arg(help="", argstr="{y_min}", position=5) + y_size: int | None = shell.arg(help="", argstr="{y_size}", position=6) + z_min: int | None = shell.arg(help="", argstr="{z_min}", position=7) + z_size: int | None = shell.arg(help="", argstr="{z_size}", position=8) + t_min: int | None = shell.arg(help="", argstr="{t_min}", position=9) + t_size: int | None = shell.arg(help="", argstr="{t_size}", position=10) + crop_list: list[ty.Any] = shell.arg( + help="list of two tuples specifying crop options", + formatter="crop_list_formatter", + position=3, + ) + + class Outputs(shell.Outputs): + roi_file: Path = shell.outarg( + help="output file", + argstr="{roi_file}", + path_template='"bar.nii"', + position=2, + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. 
+ + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslroi" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, output_type=None, roi_file=None): + """Create a Bunch which contains all possible files generated + by running the interface. Some files are always generated, others + depending on which ``inputs`` options are set. + + + Returns + ------- + + outputs : Bunch object + Bunch object containing all possible files generated by + interface object. + + If None, file was not generated + Else, contains path, filename of generated outputfile + + """ + outputs = {} + outputs["roi_file"] = roi_file + if outputs["roi_file"] is attrs.NOTHING: + outputs["roi_file"] = _gen_fname( + in_file, suffix="_roi", output_type=output_type + ) + outputs["roi_file"] = os.path.abspath(outputs["roi_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/filter_regressor.py b/pydra/tasks/fsl/v6/utils/filter_regressor.py new file mode 100644 index 0000000..2a362c8 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/filter_regressor.py @@ -0,0 +1,161 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import numpy as np +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "filter_columns": + return argstr.format(**{name: ",".join(map(str, value))}) + elif name == "filter_all": + 
design = np.loadtxt(inputs["design_file"]) + try: + n_cols = design.shape[1] + except IndexError: + n_cols = 1 + return argstr.format(**{name: ",".join(map(str, list(range(1, n_cols + 1))))}) + + return argstr.format(**inputs) + + +def filter_columns_formatter(field, inputs): + return _format_arg("filter_columns", field, inputs, argstr="-f '{filter_columns}'") + + +def filter_all_formatter(field, inputs): + return _format_arg("filter_all", field, inputs, argstr="-f '{filter_all:d}'") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )[name] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["filter_all", "filter_columns"]]) +class FilterRegressor(shell.Task["FilterRegressor.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.filter_regressor import FilterRegressor + + """ + + executable = "fsl_regfilt" + in_file: File = shell.arg( + help="input file name (4D image)", argstr="-i {in_file}", position=1 + ) + design_file: File = shell.arg( + help="name of the matrix with time courses (e.g. 
GLM design or MELODIC mixing matrix)", + argstr="-d {design_file}", + position=3, + ) + filter_columns: list[int] = shell.arg( + help="(1-based) column indices to filter out of the data", + position=4, + formatter="filter_columns_formatter", + ) + filter_all: bool = shell.arg( + help="use all columns in the design file in denoising", + position=4, + formatter="filter_all_formatter", + ) + mask: File = shell.arg(help="mask image file name", argstr="-m {mask}") + var_norm: bool = shell.arg( + help="perform variance-normalization on data", argstr="--vn" + ) + out_vnscales: bool = shell.arg( + help="output scaling factors for variance normalization", + argstr="--out_vnscales", + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output file name for the filtered data", + argstr="-o {out_file}", + position=2, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fsl_regfilt" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + outputs["out_file"] = out_file + if outputs["out_file"] is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix="_regfilt", output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/image_maths.py b/pydra/tasks/fsl/v6/utils/image_maths.py new file mode 100644 index 0000000..6af41cf --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/image_maths.py @@ -0,0 +1,137 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + suffix=inputs["suffix"], + )[name] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class ImageMaths(shell.Task["ImageMaths.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.image_maths import ImageMaths + + >>> task = ImageMaths() + >>> task.in_file = File.mock(anatfile) + >>> task.in_file2 = File.mock() + >>> task.mask_file = File.mock() + >>> task.out_file = "foo_maths.nii" + >>> 
task.op_string = "-add 5" + >>> task.cmdline + 'None' + + + """ + + executable = "fslmaths" + in_file: File = shell.arg(help="", argstr="{in_file}", position=1) + in_file2: File = shell.arg(help="", argstr="{in_file2}", position=3) + mask_file: File = shell.arg( + help="use (following image>0) to mask current image", argstr="-mas {mask_file}" + ) + op_string: str = shell.arg( + help="string defining the operation, i. e. -add", + argstr="{op_string}", + position=2, + ) + suffix: str = shell.arg(help="out_file suffix") + out_data_type: ty.Any = shell.arg( + help="output datatype, one of (char, short, int, float, double, input)", + argstr="-odt {out_data_type}", + position=-1, + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="", argstr="{out_file}", position=-2, path_template='"foo_maths.nii"' + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslmaths" + msg += "basename is not set!" 
+
+        raise ValueError(msg)
+    if cwd is None:
+        cwd = os.getcwd()  # FIX: was undefined name `output_dir` (NameError when cwd is None)
+    if ext is None:
+        ext = {"NIFTI": ".nii", "NIFTI_PAIR": ".img", "NIFTI_GZ": ".nii.gz", "NIFTI_PAIR_GZ": ".img.gz"}.get(output_type, ".nii.gz")  # FIX: `Info` is not defined in this module (NameError); inline FSL's FSLOUTPUTTYPE extension map
+    if change_ext:
+        if suffix:
+            suffix = f"{suffix}{ext}"
+        else:
+            suffix = ext
+    if suffix is None:
+        suffix = ""
+    fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)
+    return fname
+
+
+def _list_outputs(in_file=None, out_file=None, output_type=None, suffix=None):
+    default_suffix = "_maths"  # ohinds: build suffix
+    if suffix is attrs.NOTHING or suffix is None:  # FIX: previous code overwrote the caller-supplied suffix with "_maths" then ran a no-op check; default applies only when no suffix was given
+        suffix = default_suffix
+    outputs = {}
+    outputs["out_file"] = out_file
+    if outputs["out_file"] is attrs.NOTHING:
+        outputs["out_file"] = _gen_fname(
+            in_file, suffix=suffix, output_type=output_type
+        )
+    outputs["out_file"] = os.path.abspath(outputs["out_file"])
+    return outputs
+
+
+IFLOGGER = logging.getLogger("nipype.interface")
diff --git a/pydra/tasks/fsl/v6/utils/image_meants.py b/pydra/tasks/fsl/v6/utils/image_meants.py
new file mode 100644
index 0000000..d94b61c
--- /dev/null
+++ b/pydra/tasks/fsl/v6/utils/image_meants.py
@@ -0,0 +1,140 @@
+import attrs
+from fileformats.generic import File
+import logging
+from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix
+import os
+from pathlib import Path
+from pathlib import Path
+from pydra.compose import shell
+
+
+logger = logging.getLogger(__name__)
+
+
+def _gen_filename(name, inputs):
+    if name == "out_file":
+        return _list_outputs(
+            in_file=inputs["in_file"],
+            out_file=inputs["out_file"],
+            output_type=inputs["output_type"],
+        )[name]
+    return None
+
+
+def out_file_default(inputs):
+    return _gen_filename("out_file", inputs=inputs)
+
+
+@shell.define
+class ImageMeants(shell.Task["ImageMeants.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.generic import File
+    >>> from pathlib import Path
+    >>> from pydra.tasks.fsl.v6.utils.image_meants import ImageMeants
+
+    """
+
+    executable = "fslmeants"
+    in_file: File = shell.arg(
+        help="input file for computing the average timeseries",
+        argstr="-i {in_file}",
+        position=1,
+    )
+    mask: File = shell.arg(help="input 3D mask", argstr="-m {mask}")
+    spatial_coord: list[int] = shell.arg(
+        help=" requested spatial coordinate (instead of mask)",
+        argstr="-c {spatial_coord}",
+    )
+    use_mm: bool = shell.arg(
+        help="use mm instead of voxel coordinates (for -c option)", argstr="--usemm"
+    )
+    show_all: bool = shell.arg(
+        help="show all voxel time series (within mask) instead of averaging",
+        argstr="--showall",
+    )
+    eig: bool = shell.arg(
+        help="calculate Eigenvariate(s) instead of mean (output will have 0 mean)",
+        argstr="--eig",
+    )
+    order: int = shell.arg(
+        help="select number of Eigenvariates", argstr="--order={order}", default=1
+    )
+    nobin: bool = shell.arg(
+        help="do not binarise the mask for calculation of Eigenvariates",
+        argstr="--no_bin",
+    )
+    transpose: bool = shell.arg(
+        help="output results in transpose format (one row per voxel/mean)",
+        argstr="--transpose",
+    )
+
+    class Outputs(shell.Outputs):
+        out_file: Path = shell.outarg(
+            help="name of output text matrix",
+            argstr="-o {out_file}",
+            path_template="out_file",
+        )
+
+
+def _gen_fname(
+    basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None
+):
+    """Generate a filename based on the given parameters.
+
+    The filename will take the form: cwd/basename.
+    If change_ext is True, it will use the extensions specified in
+    inputs.output_type.
+
+    Parameters
+    ----------
+    basename : str
+        Filename to base the new filename on.
+    cwd : str
+        Path to prefix to the new filename. (default is output_dir)
+    suffix : str
+        Suffix to add to the `basename`. (defaults is '' )
+    change_ext : bool
+        Flag to change the filename extension to the FSL output type.
+        (default True)
+
+    Returns
+    -------
+    fname : str
+        New filename based on given parameters.
+
+    """
+
+    if basename == "":
+        msg = "Unable to generate filename for command %s. " % "fslmeants"
+        msg += "basename is not set!"
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + outputs["out_file"] = out_file + if outputs["out_file"] is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix="_ts", ext=".txt", change_ext=True, output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/image_stats.py b/pydra/tasks/fsl/v6/utils/image_stats.py new file mode 100644 index 0000000..91c0d42 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/image_stats.py @@ -0,0 +1,118 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import load_json, save_json +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "mask_file": + return "" + if name == "op_string": + if "-k %s" in inputs["op_string"]: + if inputs["mask_file"] is not attrs.NOTHING: + return inputs["op_string"] % inputs["mask_file"] + else: + raise ValueError("-k %s option in op_string requires mask_file") + + return argstr.format(**inputs) + + +def mask_file_formatter(field, inputs): + return _format_arg("mask_file", field, inputs, argstr="") + + +def op_string_formatter(field, inputs): + return _format_arg("op_string", field, inputs, argstr="{op_string}") + + +def aggregate_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + needed_outputs = ["out_stat"] + + outputs = {} 
+
+    outfile = os.path.join(os.getcwd(), "stat_result.json")
+    if stdout is None:  # FIX: was `if runtime is None:` — `runtime` is not defined in this function (NameError); absent stdout means fall back to the cached JSON result
+        try:
+            out_stat = load_json(outfile)["stat"]
+        except OSError:
+            return outputs  # FIX: was `return None.outputs` (unconditional AttributeError); returning the empty dict makes out_stat resolve to None downstream
+    else:
+        out_stat = []
+        for line in stdout.split("\n"):
+            if line:
+                values = line.split()
+                if len(values) > 1:
+                    out_stat.append([float(val) for val in values])
+                else:
+                    out_stat.extend([float(val) for val in values])
+        if len(out_stat) == 1:
+            out_stat = out_stat[0]
+        save_json(outfile, dict(stat=out_stat))
+    outputs["out_stat"] = out_stat
+    return outputs
+
+
+def out_stat_callable(output_dir, inputs, stdout, stderr):
+    outputs = aggregate_outputs(
+        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
+    )
+    return outputs.get("out_stat")
+
+
+@shell.define
+class ImageStats(shell.Task["ImageStats.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.generic import File
+    >>> from pydra.tasks.fsl.v6.utils.image_stats import ImageStats
+
+    >>> task = ImageStats()
+    >>> task.in_file = File.mock(funcfile)
+    >>> task.op_string = "-M"
+    >>> task.mask_file = File.mock()
+    >>> task.index_mask_file = File.mock()
+    >>> task.cmdline
+    'None'
+
+
+    """
+
+    executable = "fslstats"
+    split_4d: bool = shell.arg(
+        help="give a separate output line for each 3D volume of a 4D timeseries",
+        argstr="-t",
+        position=1,
+    )
+    in_file: File = shell.arg(
+        help="input file to generate stats of", argstr="{in_file}", position=3
+    )
+    op_string: str = shell.arg(
+        help="string defining the operation, options are applied in order, e.g. 
-M -l 10 -M will report the non-zero mean, apply a threshold and then report the new nonzero mean", + position=4, + formatter="op_string_formatter", + ) + mask_file: File = shell.arg( + help="mask file used for option -k %s", formatter="mask_file_formatter" + ) + index_mask_file: File = shell.arg( + help="generate separate n submasks from indexMask, for indexvalues 1..n where n is the maximum index value in indexMask, and generate statistics for each submask", + argstr="-K {index_mask_file}", + position=2, + ) + + class Outputs(shell.Outputs): + out_stat: ty.Any | None = shell.out( + help="stats output", callable=out_stat_callable + ) diff --git a/pydra/tasks/fsl/v6/utils/inv_warp.py b/pydra/tasks/fsl/v6/utils/inv_warp.py new file mode 100644 index 0000000..6829e4f --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/inv_warp.py @@ -0,0 +1,74 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +@shell.define(xor=[["absolute", "relative"]]) +class InvWarp(shell.Task["InvWarp.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.inv_warp import InvWarp + + >>> task = InvWarp() + >>> task.warp = Nifti1.mock("struct2mni.nii") + >>> task.reference = File.mock() + >>> task.cmdline + 'invwarp --out=struct2mni_inverse.nii.gz --ref=anatomical.nii --warp=struct2mni.nii' + + + """ + + executable = "invwarp" + warp: Nifti1 = shell.arg( + help="Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout).", + argstr="--warp={warp}", + ) + reference: File = shell.arg( + help="Name of a file in target space. 
Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt.", + argstr="--ref={reference}", + ) + absolute: bool = shell.arg( + help="If set it indicates that the warps in --warp should be interpreted as absolute, provided that it is not created by fnirt (which always uses relative warps). If set it also indicates that the output --out should be absolute.", + argstr="--abs", + ) + relative: bool = shell.arg( + help="If set it indicates that the warps in --warp should be interpreted as relative. I.e. the values in --warp are displacements from the coordinates in the --ref space. If set it also indicates that the output --out should be relative.", + argstr="--rel", + ) + niter: int = shell.arg( + help="Determines how many iterations of the gradient-descent search that should be run.", + argstr="--niter={niter}", + ) + regularise: float = shell.arg( + help="Regularization strength (default=1.0).", + argstr="--regularise={regularise}", + ) + noconstraint: bool = shell.arg( + help="Do not apply Jacobian constraint", argstr="--noconstraint" + ) + jacobian_min: float = shell.arg( + help="Minimum acceptable Jacobian value for constraint (default 0.01)", + argstr="--jmin={jacobian_min}", + ) + jacobian_max: float = shell.arg( + help="Maximum acceptable Jacobian value for constraint (default 100.0)", + argstr="--jmax={jacobian_max}", + ) + + class Outputs(shell.Outputs): + inverse_warp: Path = shell.outarg( + help='Name of output file, containing warps that are the "reverse" of those in --warp. 
This will be a field-file (rather than a file of spline coefficients), and it will have any affine component included as part of the displacements.',
+            argstr="--out={inverse_warp}",
+            path_template="{warp}_inverse",
+        )
diff --git a/pydra/tasks/fsl/v6/utils/merge.py b/pydra/tasks/fsl/v6/utils/merge.py
new file mode 100644
index 0000000..678f8a2
--- /dev/null
+++ b/pydra/tasks/fsl/v6/utils/merge.py
@@ -0,0 +1,75 @@
+import attrs
+from fileformats.medimage import Nifti1
+import logging
+from pathlib import Path
+from pathlib import Path
+from pydra.compose import shell
+import typing as ty
+
+
+logger = logging.getLogger(__name__)
+
+
+def _format_arg(name, value, inputs, argstr):
+    if value is None:
+        return ""
+
+    if name == "tr":
+        if inputs["dimension"] != "t":
+            raise ValueError("When TR is specified, dimension must be t")
+        return argstr.format(**{name: value})
+    if name == "dimension":
+        if inputs["tr"] is not attrs.NOTHING:
+            return "-tr"
+        return argstr.format(**{name: value})
+
+    return argstr.format(**inputs)
+
+
+def tr_formatter(field, inputs):
+    return _format_arg("tr", field, inputs, argstr="{tr:.2f}")  # FIX: was "{tr:.2}" = 2 *significant digits*, rendering tr=2.25 as "2.2"; ".2f" matches nipype's "%.2f" and the docstring's expected cmdline
+
+
+def dimension_formatter(field, inputs):
+    return _format_arg("dimension", field, inputs, argstr="-{dimension}")
+
+
+@shell.define
+class Merge(shell.Task["Merge.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.medimage import Nifti1
+    >>> from pathlib import Path
+    >>> from pydra.tasks.fsl.v6.utils.merge import Merge
+
+    >>> task = Merge()
+    >>> task.in_files = [Nifti1.mock("functional2.nii"), Nifti1.mock("functional3.nii")]
+    >>> task.tr = 2.25
+    >>> task.cmdline
+    'fslmerge -tr functional2_merged.nii.gz functional2.nii functional3.nii 2.25'
+
+
+    """
+
+    executable = "fslmerge"
+    in_files: list[Nifti1] = shell.arg(help="", argstr="{in_files}", position=3)
+    dimension: ty.Any = shell.arg(
+        help="dimension along which to merge, optionally set tr input when dimension is t",
+        formatter="dimension_formatter",
+        position=1,
+    )
+    tr: 
float = shell.arg( + help="use to specify TR in seconds (default is 1.00 sec), overrides dimension and sets it to tr", + position=-1, + formatter="tr_formatter", + ) + + class Outputs(shell.Outputs): + merged_file: Path = shell.outarg( + help="", + argstr="{merged_file}", + path_template="{in_files}_merged", + position=2, + ) diff --git a/pydra/tasks/fsl/v6/utils/motion_outliers.py b/pydra/tasks/fsl/v6/utils/motion_outliers.py new file mode 100644 index 0000000..93be809 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/motion_outliers.py @@ -0,0 +1,67 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +@shell.define +class MotionOutliers(shell.Task["MotionOutliers.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.motion_outliers import MotionOutliers + + >>> task = MotionOutliers() + >>> task.in_file = Nifti1.mock("epi.nii") + >>> task.mask = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "fsl_motion_outliers" + in_file: Nifti1 = shell.arg(help="unfiltered 4D image", argstr="-i {in_file}") + mask: File = shell.arg(help="mask image for calculating metric", argstr="-m {mask}") + metric: ty.Any = shell.arg( + help="metrics: refrms - RMS intensity difference to reference volume as metric [default metric], refmse - Mean Square Error version of refrms (used in original version of fsl_motion_outliers), dvars - DVARS, fd - frame displacement, fdrms - FD with RMS matrix calculation", + argstr="--{metric}", + ) + threshold: float = shell.arg( + help="specify absolute threshold value (otherwise use box-plot cutoff = P75 + 1.5*IQR)", + argstr="--thresh={threshold}", + ) + no_motion_correction: bool = shell.arg( + help="do 
not run motion correction (assumed already done)", argstr="--nomoco" + ) + dummy: int = shell.arg( + help="number of dummy scans to delete (before running anything and creating EVs)", + argstr="--dummy={dummy}", + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output outlier file name", + argstr="-o {out_file}", + path_template="{in_file}_outliers.txt", + ) + out_metric_values: Path = shell.outarg( + help="output metric values (DVARS etc.) file name", + argstr="-s {out_metric_values}", + path_template="{in_file}_metrics.txt", + ) + out_metric_plot: Path = shell.outarg( + help="output metric values plot (DVARS etc.) file name", + argstr="-p {out_metric_plot}", + path_template="{in_file}_metrics.png", + ) diff --git a/pydra/tasks/fsl/v6/utils/overlay.py b/pydra/tasks/fsl/v6/utils/overlay.py new file mode 100644 index 0000000..a037eac --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/overlay.py @@ -0,0 +1,223 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import ( + fname_presuffix, + split_filename, +) +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "transparency": + if value: + return "1" + else: + return "0" + if name == "out_type": + if value == "float": + return "0" + else: + return "1" + if name == "show_negative_stats": + return "{} {:.2f} {:.2f}".format( + inputs["stat_image"], + inputs["stat_thresh"][0] * -1, + inputs["stat_thresh"][1] * -1, + ) + + return argstr.format(**inputs) + + +def transparency_formatter(field, inputs): + return _format_arg("transparency", field, inputs, argstr="{transparency:d}") + + +def out_type_formatter(field, inputs): + return _format_arg("out_type", field, inputs, argstr="{out_type}") + + +def 
show_negative_stats_formatter(field, inputs): + return _format_arg( + "show_negative_stats", field, inputs, argstr="{show_negative_stats:d}" + ) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + out_file=inputs["out_file"], + output_type=inputs["output_type"], + show_negative_stats=inputs["show_negative_stats"], + stat_image=inputs["stat_image"], + stat_image2=inputs["stat_image2"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define( + xor=[ + ["auto_thresh_bg", "bg_thresh", "full_bg_range"], + ["show_negative_stats", "stat_image2"], + ] +) +class Overlay(shell.Task["Overlay.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.overlay import Overlay + + """ + + executable = "overlay" + transparency: bool = shell.arg( + help="make overlay colors semi-transparent", + position=1, + formatter="transparency_formatter", + default=True, + ) + out_type: ty.Any = shell.arg( + help="write output with float or int", + position=2, + formatter="out_type_formatter", + default="float", + ) + use_checkerboard: bool = shell.arg( + help="use checkerboard mask for overlay", argstr="-c", position=3 + ) + background_image: File = shell.arg( + help="image to use as background", argstr="{background_image}", position=4 + ) + auto_thresh_bg: bool = shell.arg( + help="automatically threshold the background image", argstr="-a", position=5 + ) + full_bg_range: bool = shell.arg( + help="use full range of background image", argstr="-A", position=5 + ) + bg_thresh: ty.Any | None = shell.arg( + help="min and max values for background intensity", + argstr="{bg_thresh[0]:.3} {bg_thresh[1]:.3}", + position=5, + ) + stat_image: File = shell.arg( + help="statistical image to overlay in color", argstr="{stat_image}", position=6 + ) + stat_thresh: ty.Any = shell.arg( + help="min and max 
values for the statistical overlay", + argstr="{stat_thresh[0]:.2} {stat_thresh[1]:.2}", + position=7, + ) + show_negative_stats: bool = shell.arg( + help="display negative statistics in overlay", + position=8, + formatter="show_negative_stats_formatter", + ) + stat_image2: File | None = shell.arg( + help="second statistical image to overlay in color", + argstr="{stat_image2}", + position=9, + ) + stat_thresh2: ty.Any = shell.arg( + help="min and max values for second statistical overlay", + argstr="{stat_thresh2[0]:.2} {stat_thresh2[1]:.2}", + position=10, + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="combined image volume", + argstr="{out_file}", + position=-1, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "overlay" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs( + out_file=None, + output_type=None, + show_negative_stats=None, + stat_image=None, + stat_image2=None, +): + outputs = {} + out_file = out_file + if out_file is attrs.NOTHING: + if (stat_image2 is not attrs.NOTHING) and ( + (show_negative_stats is attrs.NOTHING) or not show_negative_stats + ): + stem = "{}_and_{}".format( + split_filename(stat_image)[1], + split_filename(stat_image2)[1], + ) + else: + stem = split_filename(stat_image)[1] + out_file = _gen_fname(stem, suffix="_overlay", output_type=output_type) + outputs["out_file"] = os.path.abspath(out_file) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/plot_motion_params.py b/pydra/tasks/fsl/v6/utils/plot_motion_params.py new file mode 100644 index 0000000..db519b6 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/plot_motion_params.py @@ -0,0 +1,120 @@ +import attrs +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "plot_type": + source = inputs["in_source"] + + if inputs["plot_type"] == "displacement": + title = "-t 'MCFLIRT estimated mean displacement (mm)'" + labels = "-a abs,rel" + return f"{title} {labels}" + + sfdict = dict(fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3)) + + sfstr = "--start=%d --finish=%d" % sfdict[f"{source}_{value[:3]}"] + titledict = dict(fsl="MCFLIRT", 
spm="Realign") + unitdict = dict(rot="radians", tra="mm") + + title = "'{} estimated {} ({})'".format( + titledict[source], + value, + unitdict[value[:3]], + ) + + return f"-t {title} {sfstr} -a x,y,z" + elif name == "plot_size": + return "-h %d -w %d" % value + elif name == "in_file": + if isinstance(value, list): + args = ",".join(value) + return "-i %s" % args + else: + return "-i %s" % value + + return argstr.format(**inputs) + + +def plot_type_formatter(field, inputs): + return _format_arg("plot_type", field, inputs, argstr="{plot_type}") + + +def plot_size_formatter(field, inputs): + return _format_arg("plot_size", field, inputs, argstr="{plot_size}") + + +def in_file_formatter(field, inputs): + return _format_arg("in_file", field, inputs, argstr="{in_file}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + plot_type=inputs["plot_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class PlotMotionParams(shell.Task["PlotMotionParams.Outputs"]): + """ + Examples + ------- + + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.plot_motion_params import PlotMotionParams + + """ + + executable = "fsl_tsplot" + in_file: ty.Any = shell.arg( + help="file with motion parameters", position=1, formatter="in_file_formatter" + ) + in_source: ty.Any = shell.arg( + help="which program generated the motion parameter file - fsl, spm" + ) + plot_type: ty.Any = shell.arg( + help="which motion type to plot - rotations, translations, displacement", + formatter="plot_type_formatter", + ) + plot_size: ty.Any = shell.arg( + help="plot image height and width", formatter="plot_size_formatter" + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", argstr="-o {out_file}", path_template="out_file" + ) + + +def _list_outputs(in_file=None, 
out_file=None, plot_type=None): + outputs = {} + out_file = out_file + if out_file is attrs.NOTHING: + if isinstance(in_file, list): + infile = in_file[0] + else: + infile = in_file + plttype = dict(rot="rot", tra="trans", dis="disp")[plot_type[:3]] + out_file = fname_presuffix(infile, suffix="_%s.png" % plttype, use_ext=False) + outputs["out_file"] = os.path.abspath(out_file) + return outputs diff --git a/pydra/tasks/fsl/v6/utils/plot_time_series.py b/pydra/tasks/fsl/v6/utils/plot_time_series.py new file mode 100644 index 0000000..c9ca7ca --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/plot_time_series.py @@ -0,0 +1,207 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "in_file": + if isinstance(value, list): + args = ",".join(value) + return "-i %s" % args + else: + return "-i %s" % value + elif name == "labels": + if isinstance(value, list): + args = ",".join(value) + return "-a %s" % args + else: + return "-a %s" % value + elif name == "title": + return "-t '%s'" % value + elif name == "plot_range": + return "--start=%d --finish=%d" % value + elif name == "y_range": + return "--ymin=%d --ymax=%d" % value + elif name == "plot_size": + return "-h %d -w %d" % value + + return argstr.format(**inputs) + + +def in_file_formatter(field, inputs): + return _format_arg("in_file", field, inputs, argstr="{in_file}") + + +def labels_formatter(field, inputs): + return _format_arg("labels", field, inputs, argstr="{labels}") + + +def title_formatter(field, inputs): + return _format_arg("title", field, inputs, argstr="{title}") + + +def plot_range_formatter(field, inputs): + return _format_arg("plot_range", field, inputs, 
argstr="{plot_range}") + + +def y_range_formatter(field, inputs): + return _format_arg("y_range", field, inputs, argstr="{y_range}") + + +def plot_size_formatter(field, inputs): + return _format_arg("plot_size", field, inputs, argstr="{plot_size}") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define( + xor=[ + ["plot_finish", "plot_range"], + ["plot_finish", "plot_range", "plot_start"], + ["plot_range", "plot_start"], + ["y_max", "y_min", "y_range"], + ["y_max", "y_range"], + ["y_min", "y_range"], + ] +) +class PlotTimeSeries(shell.Task["PlotTimeSeries.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.plot_time_series import PlotTimeSeries + + """ + + executable = "fsl_tsplot" + in_file: ty.Any = shell.arg( + help="file or list of files with columns of timecourse information", + position=1, + formatter="in_file_formatter", + ) + plot_start: int | None = shell.arg( + help="first column from in-file to plot", argstr="--start={plot_start}" + ) + plot_finish: int | None = shell.arg( + help="final column from in-file to plot", argstr="--finish={plot_finish}" + ) + plot_range: ty.Any | None = shell.arg( + help="first and last columns from the in-file to plot", + formatter="plot_range_formatter", + ) + title: str = shell.arg(help="plot title", formatter="title_formatter") + legend_file: File = shell.arg(help="legend file", argstr="--legend={legend_file}") + labels: ty.Any = shell.arg( + help="label or list of labels", formatter="labels_formatter" + ) + y_min: float | None = shell.arg(help="minimum y value", argstr="--ymin={y_min:.2}") + y_max: float | None = shell.arg(help="maximum y value", 
argstr="--ymax={y_max:.2}") + y_range: ty.Any | None = shell.arg( + help="min and max y axis values", formatter="y_range_formatter" + ) + x_units: int = shell.arg( + help="scaling units for x-axis (between 1 and length of in file)", + argstr="-u {x_units}", + default=1, + ) + plot_size: ty.Any = shell.arg( + help="plot image height and width", formatter="plot_size_formatter" + ) + x_precision: int = shell.arg( + help="precision of x-axis labels", argstr="--precision={x_precision}" + ) + sci_notation: bool = shell.arg(help="switch on scientific notation", argstr="--sci") + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", argstr="-o {out_file}", path_template="out_file" + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fsl_tsplot" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + out_file = out_file + if out_file is attrs.NOTHING: + if isinstance(in_file, list): + infile = in_file[0] + else: + infile = in_file + out_file = _gen_fname(infile, ext=".png", output_type=output_type) + outputs["out_file"] = os.path.abspath(out_file) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/power_spectrum.py b/pydra/tasks/fsl/v6/utils/power_spectrum.py new file mode 100644 index 0000000..8baf844 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/power_spectrum.py @@ -0,0 +1,109 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _gen_outfilename( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + ) + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class PowerSpectrum(shell.Task["PowerSpectrum.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.power_spectrum import PowerSpectrum + + """ + + executable = "fslpspec" + in_file: File = shell.arg( + help="input 4D file to estimate the power spectrum", + argstr="{in_file}", + position=1, + ) + + class Outputs(shell.Outputs): + out_file: Path = 
shell.outarg( + help="name of output 4D file for power spectrum", + argstr="{out_file}", + path_template="out_file", + position=2, + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslpspec" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _gen_outfilename(in_file=None, out_file=None, output_type=None): + out_file = out_file + if (out_file is attrs.NOTHING) and (in_file is not attrs.NOTHING): + out_file = _gen_fname(in_file, suffix="_ps", output_type=output_type) + return out_file + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/reorient_2_std.py b/pydra/tasks/fsl/v6/utils/reorient_2_std.py new file mode 100644 index 0000000..f088924 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/reorient_2_std.py @@ -0,0 +1,92 @@ +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pathlib import Path 
+from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _gen_fname( + inputs["in_file"], suffix="_reoriented", output_type=inputs["output_type"] + ) + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class Reorient2Std(shell.Task["Reorient2Std.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.reorient_2_std import Reorient2Std + + """ + + executable = "fslreorient2std" + in_file: File = shell.arg(help="", argstr="{in_file}") + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="", argstr="{out_file}", path_template="out_file" + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslreorient2std" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/robust_fov.py b/pydra/tasks/fsl/v6/utils/robust_fov.py new file mode 100644 index 0000000..b76cd9b --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/robust_fov.py @@ -0,0 +1,40 @@ +from fileformats.generic import File +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +@shell.define +class RobustFOV(shell.Task["RobustFOV.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.robust_fov import RobustFOV + + """ + + executable = "robustfov" + in_file: File = shell.arg(help="input filename", argstr="-i {in_file}", position=1) + brainsize: int = shell.arg( + help="size of brain in z-dimension (default 170mm/150mm)", + argstr="-b {brainsize}", + ) + + class Outputs(shell.Outputs): + out_roi: Path = shell.outarg( + help="ROI volume output name", + argstr="-r {out_roi}", + path_template="{in_file}_ROI", + ) + out_transform: Path = shell.outarg( + help="Transformation matrix in_file to out_roi output name", + argstr="-m {out_transform}", + path_template="{in_file}_to_ROI", + ) diff --git a/pydra/tasks/fsl/v6/utils/sig_loss.py b/pydra/tasks/fsl/v6/utils/sig_loss.py new file mode 100644 index 0000000..6696b8d --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/sig_loss.py @@ -0,0 +1,113 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +from pathlib import Path +from pathlib import 
Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class SigLoss(shell.Task["SigLoss.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.sig_loss import SigLoss + + """ + + executable = "sigloss" + in_file: File = shell.arg(help="b0 fieldmap file", argstr="-i {in_file}") + mask_file: File = shell.arg(help="brain mask file", argstr="-m {mask_file}") + echo_time: float = shell.arg(help="echo time in seconds", argstr="--te={echo_time}") + slice_direction: ty.Any = shell.arg( + help="slicing direction", argstr="-d {slice_direction}" + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output signal loss estimate file", + argstr="-s {out_file}", + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. 
" % "sigloss" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + outputs["out_file"] = out_file + if (outputs["out_file"] is attrs.NOTHING) and (in_file is not attrs.NOTHING): + outputs["out_file"] = _gen_fname( + in_file, suffix="_sigloss", output_type=output_type + ) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/slice.py b/pydra/tasks/fsl/v6/utils/slice.py new file mode 100644 index 0000000..026f3bc --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/slice.py @@ -0,0 +1,65 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from glob import glob +import logging +from pydra.tasks.fsl.v6.base import Info +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + ext = Info.output_type_to_ext(inputs["output_type"]) + suffix = "_slice_*" + ext + if inputs["out_base_name"] is not attrs.NOTHING: + fname_template = os.path.abspath(inputs["out_base_name"] + suffix) + else: + fname_template = fname_presuffix( + inputs["in_file"], suffix=suffix, use_ext=False + ) + + outputs["out_files"] = sorted(glob(fname_template)) + + return outputs + + +def out_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_files") + + 
+@shell.define +class Slice(shell.Task["Slice.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pydra.tasks.fsl.v6.utils.slice import Slice + + >>> task = Slice() + >>> task.in_file = Nifti1.mock("functional.nii") + >>> task.cmdline + 'fslslice functional.nii sl' + + + """ + + executable = "fslslice" + in_file: Nifti1 = shell.arg(help="input filename", argstr="{in_file}", position=1) + out_base_name: str = shell.arg( + help="outputs prefix", argstr="{out_base_name}", position=2 + ) + + class Outputs(shell.Outputs): + out_files: list[File] | None = shell.out(callable=out_files_callable) diff --git a/pydra/tasks/fsl/v6/utils/slicer.py b/pydra/tasks/fsl/v6/utils/slicer.py new file mode 100644 index 0000000..4a9baec --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/slicer.py @@ -0,0 +1,201 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "show_orientation": + if value: + return "" + else: + return "-u" + elif name == "label_slices": + if value: + return "-L" + else: + return "" + + return argstr.format(**inputs) + + +def show_orientation_formatter(field, inputs): + return _format_arg("show_orientation", field, inputs, argstr="{show_orientation:d}") + + +def label_slices_formatter(field, inputs): + return _format_arg("label_slices", field, inputs, argstr="-L") + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return 
_gen_filename("out_file", inputs=inputs) + + +@shell.define(xor=[["all_axial", "middle_slices", "sample_axial", "single_slice"]]) +class Slicer(shell.Task["Slicer.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.slicer import Slicer + + """ + + executable = "slicer" + in_file: File = shell.arg(help="input volume", argstr="{in_file}", position=2) + image_edges: File = shell.arg( + help="volume to display edge overlay for (useful for checking registration", + argstr="{image_edges}", + position=3, + ) + label_slices: bool = shell.arg( + help="display slice number", + formatter="label_slices_formatter", + position=4, + default=True, + ) + colour_map: File = shell.arg( + help="use different colour map from that stored in nifti header", + argstr="-l {colour_map}", + position=5, + ) + intensity_range: ty.Any = shell.arg( + help="min and max intensities to display", + argstr="-i {intensity_range[0]:.3} {intensity_range[1]:.3}", + position=6, + ) + threshold_edges: float = shell.arg( + help="use threshold for edges", argstr="-e {threshold_edges:.3}", position=7 + ) + dither_edges: bool = shell.arg( + help="produce semi-transparent (dithered) edges", argstr="-t", position=8 + ) + nearest_neighbour: bool = shell.arg( + help="use nearest neighbor interpolation for output", argstr="-n", position=9 + ) + show_orientation: bool = shell.arg( + help="label left-right orientation", + formatter="show_orientation_formatter", + position=10, + default=True, + ) + single_slice: ty.Any | None = shell.arg( + help="output picture of single slice in the x, y, or z plane", + argstr="-{single_slice}", + requires=["slice_number"], + position=11, + ) + slice_number: int = shell.arg( + help="slice number to save in picture", argstr="-{slice_number}", position=12 + ) + middle_slices: bool = shell.arg( + help="output picture of mid-sagittal, axial, and coronal slices", + argstr="-a", + position=11, 
+ ) + all_axial: bool = shell.arg( + help="output all axial slices into one picture", + argstr="-A", + requires=["image_width"], + position=11, + ) + sample_axial: int | None = shell.arg( + help="output every n axial slices into one picture", + argstr="-S {sample_axial}", + requires=["image_width"], + position=11, + ) + image_width: int = shell.arg( + help="max picture width", argstr="{image_width}", position=-2 + ) + scaling: float = shell.arg(help="image scale", argstr="-s {scaling}", position=1) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="picture to write", + argstr="{out_file}", + position=-1, + path_template="out_file", + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "slicer" + msg += "basename is not set!" 
+ raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + out_file = out_file + if out_file is attrs.NOTHING: + out_file = _gen_fname(in_file, ext=".png", output_type=output_type) + outputs["out_file"] = os.path.abspath(out_file) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/smooth.py b/pydra/tasks/fsl/v6/utils/smooth.py new file mode 100644 index 0000000..360b509 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/smooth.py @@ -0,0 +1,78 @@ +from fileformats.generic import File +import logging +import numpy as np +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + if value is None: + return "" + + if name == "fwhm": + sigma = float(value) / np.sqrt(8 * np.log(2)) + pass + + return argstr.format(**inputs) + + +def fwhm_formatter(field, inputs): + return _format_arg("fwhm", field, inputs, argstr="-kernel gauss {fwhm:.03} -fmean") + + +@shell.define(xor=[["fwhm", "sigma"]]) +class Smooth(shell.Task["Smooth.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.smooth import Smooth + + >>> task = Smooth() + >>> task.in_file = File.mock() + >>> task.sigma = 8.0 + >>> task.cmdline + 'fslmaths functional2.nii -kernel gauss 8.000 -fmean functional2_smooth.nii.gz' + + + >>> task = Smooth() + >>> task.in_file = File.mock() + >>> task.fwhm = 8.0 + >>> task.cmdline + 'fslmaths functional2.nii -kernel gauss 3.397 -fmean 
functional2_smooth.nii.gz' + + + >>> task = Smooth() + >>> task.in_file = File.mock() + >>> task.cmdline + 'None' + + + """ + + executable = "fslmaths" + in_file: File = shell.arg(help="", argstr="{in_file}", position=1) + sigma: float | None = shell.arg( + help="gaussian kernel sigma in mm (not voxels)", + argstr="-kernel gauss {sigma:.03} -fmean", + position=2, + ) + fwhm: float | None = shell.arg( + help="gaussian kernel fwhm, will be converted to sigma in mm (not voxels)", + formatter="fwhm_formatter", + position=2, + ) + + class Outputs(shell.Outputs): + smoothed_file: Path = shell.outarg( + help="", + argstr="{smoothed_file}", + path_template="{in_file}_smooth", + position=3, + ) diff --git a/pydra/tasks/fsl/v6/utils/split.py b/pydra/tasks/fsl/v6/utils/split.py new file mode 100644 index 0000000..f9443d5 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/split.py @@ -0,0 +1,56 @@ +import attrs +from fileformats.generic import File +from glob import glob +import logging +from pydra.tasks.fsl.v6.base import Info +import os +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + ext = Info.output_type_to_ext(inputs["output_type"]) + outbase = "vol[0-9]*" + if inputs["out_base_name"] is not attrs.NOTHING: + outbase = "%s[0-9]*" % inputs["out_base_name"] + outputs["out_files"] = sorted(glob(os.path.join(os.getcwd(), outbase + ext))) + return outputs + + +def out_files_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_files") + + +@shell.define +class Split(shell.Task["Split.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pydra.tasks.fsl.v6.utils.split import Split + + """ + + executable = "fslsplit" + in_file: File = 
shell.arg(help="input filename", argstr="{in_file}", position=1) + out_base_name: str = shell.arg( + help="outputs prefix", argstr="{out_base_name}", position=2 + ) + dimension: ty.Any = shell.arg( + help="dimension along which the file will be split", + argstr="-{dimension}", + position=3, + ) + + class Outputs(shell.Outputs): + out_files: list[File] | None = shell.out(callable=out_files_callable) diff --git a/pydra/tasks/fsl/v6/utils/swap_dimensions.py b/pydra/tasks/fsl/v6/utils/swap_dimensions.py new file mode 100644 index 0000000..f0328bf --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/swap_dimensions.py @@ -0,0 +1,112 @@ +import attrs +from fileformats.generic import File +import logging +from pydra.tasks.fsl.v6.nipype_ports.utils.filemanip import fname_presuffix +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _gen_filename(name, inputs): + if name == "out_file": + return _list_outputs( + in_file=inputs["in_file"], + out_file=inputs["out_file"], + output_type=inputs["output_type"], + )["out_file"] + return None + + +def out_file_default(inputs): + return _gen_filename("out_file", inputs=inputs) + + +@shell.define +class SwapDimensions(shell.Task["SwapDimensions.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.swap_dimensions import SwapDimensions + + """ + + executable = "fslswapdim" + in_file: File = shell.arg(help="input image", argstr="{in_file}", position=1) + new_dims: ty.Any = shell.arg( + help="3-tuple of new dimension order", + argstr="{new_dims[0]} {new_dims[1]} {new_dims[2]}", + ) + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="image to write", argstr="{out_file}", path_template="out_file" + ) + + +def _gen_fname( + basename, cwd=None, suffix=None, change_ext=True, ext=None, output_type=None +): + """Generate 
a filename based on the given parameters. + + The filename will take the form: cwd/basename. + If change_ext is True, it will use the extensions specified in + inputs.output_type. + + Parameters + ---------- + basename : str + Filename to base the new filename on. + cwd : str + Path to prefix to the new filename. (default is output_dir) + suffix : str + Suffix to add to the `basename`. (defaults is '' ) + change_ext : bool + Flag to change the filename extension to the FSL output type. + (default True) + + Returns + ------- + fname : str + New filename based on given parameters. + + """ + + if basename == "": + msg = "Unable to generate filename for command %s. " % "fslswapdim" + msg += "basename is not set!" + raise ValueError(msg) + if cwd is None: + cwd = output_dir + if ext is None: + ext = Info.output_type_to_ext(output_type) + if change_ext: + if suffix: + suffix = f"{suffix}{ext}" + else: + suffix = ext + if suffix is None: + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) + return fname + + +def _list_outputs(in_file=None, out_file=None, output_type=None): + outputs = {} + outputs["out_file"] = out_file + if out_file is attrs.NOTHING: + outputs["out_file"] = _gen_fname( + in_file, suffix="_newdims", output_type=output_type + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) + return outputs + + +IFLOGGER = logging.getLogger("nipype.interface") diff --git a/pydra/tasks/fsl/v6/utils/tests/conftest.py b/pydra/tasks/fsl/v6/utils/tests/conftest.py new file mode 100644 index 0000000..8c8af14 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/conftest.py @@ -0,0 +1,24 @@ +# For debugging in IDE's don't catch raised exceptions and let the IDE +# break at it +import os +import pytest + + +if os.getenv("_PYTEST_RAISE", "0") != "0": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value # raise internal errors instead of capturing them + + 
@pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value # raise internal errors instead of capturing them + + def pytest_configure(config): + config.option.capture = "no" # allow print statements to show up in the console + config.option.log_cli = True # show log messages in the console + config.option.log_level = "INFO" # set the log level to INFO + + CATCH_CLI_EXCEPTIONS = False +else: + CATCH_CLI_EXCEPTIONS = True diff --git a/pydra/tasks/fsl/v6/utils/tests/test_avscale.py b/pydra/tasks/fsl/v6/utils/tests/test_avscale.py new file mode 100644 index 0000000..37470b2 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_avscale.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.av_scale import AvScale +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_avscale_1(): + task = AvScale() + task.mat_file = File.sample(seed=1) + task.ref_file = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_complex.py b/pydra/tasks/fsl/v6/utils/tests/test_complex.py new file mode 100644 index 0000000..d522c1b --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_complex.py @@ -0,0 +1,22 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.complex import Complex +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_complex_1(): + task = Complex() + task.complex_in_file = File.sample(seed=0) + task.complex_in_file2 = File.sample(seed=1) + task.real_in_file = File.sample(seed=2) + task.imaginary_in_file = File.sample(seed=3) + task.magnitude_in_file = File.sample(seed=4) + task.phase_in_file = File.sample(seed=5) + print(f"CMDLINE: 
{task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_convertwarp.py b/pydra/tasks/fsl/v6/utils/tests/test_convertwarp.py new file mode 100644 index 0000000..98124e3 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_convertwarp.py @@ -0,0 +1,34 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.convert_warp import ConvertWarp +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_convertwarp_1(): + task = ConvertWarp() + task.reference = File.sample(seed=0) + task.premat = File.sample(seed=2) + task.warp1 = Nifti1.sample(seed=3) + task.midmat = File.sample(seed=4) + task.warp2 = File.sample(seed=5) + task.postmat = File.sample(seed=6) + task.shift_in_file = File.sample(seed=7) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_convertwarp_2(): + task = ConvertWarp() + task.warp1 = Nifti1.sample(seed=3) + task.relwarp = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_convertxfm.py b/pydra/tasks/fsl/v6/utils/tests/test_convertxfm.py new file mode 100644 index 0000000..14a9fea --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_convertxfm.py @@ -0,0 +1,29 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.convert_xfm import ConvertXFM +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_convertxfm_1(): + task = ConvertXFM() + task.in_file = TextMatrix.sample(seed=0) + task.in_file2 = File.sample(seed=1) + print(f"CMDLINE: 
{task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_convertxfm_2(): + task = ConvertXFM() + task.in_file = TextMatrix.sample(seed=0) + task.out_file = "flirt_inv.mat" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_copygeom.py b/pydra/tasks/fsl/v6/utils/tests/test_copygeom.py new file mode 100644 index 0000000..45c4fbf --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_copygeom.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.copy_geom import CopyGeom +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_copygeom_1(): + task = CopyGeom() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_extractroi.py b/pydra/tasks/fsl/v6/utils/tests/test_extractroi.py new file mode 100644 index 0000000..ebe7846 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_extractroi.py @@ -0,0 +1,29 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.extract_roi import ExtractROI +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_extractroi_1(): + task = ExtractROI() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_extractroi_2(): + task = ExtractROI() + task.in_file = File.sample(seed=0) + task.roi_file = "bar.nii" + task.t_min = 0 + task.t_size = 1 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", 
res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_filterregressor.py b/pydra/tasks/fsl/v6/utils/tests/test_filterregressor.py new file mode 100644 index 0000000..d9af05d --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_filterregressor.py @@ -0,0 +1,19 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.filter_regressor import FilterRegressor +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_filterregressor_1(): + task = FilterRegressor() + task.in_file = File.sample(seed=0) + task.design_file = File.sample(seed=2) + task.mask = File.sample(seed=5) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_imagemaths.py b/pydra/tasks/fsl/v6/utils/tests/test_imagemaths.py new file mode 100644 index 0000000..56122a9 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_imagemaths.py @@ -0,0 +1,30 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.image_maths import ImageMaths +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_imagemaths_1(): + task = ImageMaths() + task.in_file = File.sample(seed=0) + task.in_file2 = File.sample(seed=1) + task.mask_file = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_imagemaths_2(): + task = ImageMaths() + task.in_file = File.sample(seed=0) + task.out_file = "foo_maths.nii" + task.op_string = "-add 5" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_imagemeants.py b/pydra/tasks/fsl/v6/utils/tests/test_imagemeants.py new file mode 100644 
index 0000000..a23d0cd --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_imagemeants.py @@ -0,0 +1,19 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.image_meants import ImageMeants +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_imagemeants_1(): + task = ImageMeants() + task.in_file = File.sample(seed=0) + task.mask = File.sample(seed=2) + task.order = 1 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_imagestats.py b/pydra/tasks/fsl/v6/utils/tests/test_imagestats.py new file mode 100644 index 0000000..d8fd445 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_imagestats.py @@ -0,0 +1,29 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.image_stats import ImageStats +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_imagestats_1(): + task = ImageStats() + task.in_file = File.sample(seed=1) + task.mask_file = File.sample(seed=3) + task.index_mask_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_imagestats_2(): + task = ImageStats() + task.in_file = File.sample(seed=1) + task.op_string = "-M" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_invwarp.py b/pydra/tasks/fsl/v6/utils/tests/test_invwarp.py new file mode 100644 index 0000000..1d2b053 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_invwarp.py @@ -0,0 +1,28 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import 
PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.inv_warp import InvWarp +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_invwarp_1(): + task = InvWarp() + task.warp = Nifti1.sample(seed=0) + task.reference = File.sample(seed=1) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_invwarp_2(): + task = InvWarp() + task.warp = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_merge.py b/pydra/tasks/fsl/v6/utils/tests/test_merge.py new file mode 100644 index 0000000..2378f58 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_merge.py @@ -0,0 +1,27 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.merge import Merge +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_merge_1(): + task = Merge() + task.in_files = [Nifti1.sample(seed=0)] + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_merge_2(): + task = Merge() + task.in_files = [Nifti1.sample(seed=0)] + task.tr = 2.25 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_motionoutliers.py b/pydra/tasks/fsl/v6/utils/tests/test_motionoutliers.py new file mode 100644 index 0000000..e244937 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_motionoutliers.py @@ -0,0 +1,28 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.motion_outliers import MotionOutliers +import pytest + + +logger = 
logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_motionoutliers_1(): + task = MotionOutliers() + task.in_file = Nifti1.sample(seed=0) + task.mask = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_motionoutliers_2(): + task = MotionOutliers() + task.in_file = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_overlay.py b/pydra/tasks/fsl/v6/utils/tests/test_overlay.py new file mode 100644 index 0000000..80dabc7 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_overlay.py @@ -0,0 +1,21 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.overlay import Overlay +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_overlay_1(): + task = Overlay() + task.transparency = True + task.out_type = "float" + task.background_image = File.sample(seed=3) + task.stat_image = File.sample(seed=7) + task.stat_image2 = File.sample(seed=10) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_plotmotionparams.py b/pydra/tasks/fsl/v6/utils/tests/test_plotmotionparams.py new file mode 100644 index 0000000..d67dc79 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_plotmotionparams.py @@ -0,0 +1,15 @@ +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.plot_motion_params import PlotMotionParams +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_plotmotionparams_1(): + task = PlotMotionParams() + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff 
--git a/pydra/tasks/fsl/v6/utils/tests/test_plottimeseries.py b/pydra/tasks/fsl/v6/utils/tests/test_plottimeseries.py new file mode 100644 index 0000000..d5daaa0 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_plottimeseries.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.plot_time_series import PlotTimeSeries +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_plottimeseries_1(): + task = PlotTimeSeries() + task.legend_file = File.sample(seed=5) + task.x_units = 1 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_powerspectrum.py b/pydra/tasks/fsl/v6/utils/tests/test_powerspectrum.py new file mode 100644 index 0000000..d584022 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_powerspectrum.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.power_spectrum import PowerSpectrum +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_powerspectrum_1(): + task = PowerSpectrum() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_reorient2std.py b/pydra/tasks/fsl/v6/utils/tests/test_reorient2std.py new file mode 100644 index 0000000..aa52ab1 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_reorient2std.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.reorient_2_std import Reorient2Std +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_reorient2std_1(): + task 
= Reorient2Std() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_robustfov.py b/pydra/tasks/fsl/v6/utils/tests/test_robustfov.py new file mode 100644 index 0000000..43db056 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_robustfov.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.robust_fov import RobustFOV +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_robustfov_1(): + task = RobustFOV() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_sigloss.py b/pydra/tasks/fsl/v6/utils/tests/test_sigloss.py new file mode 100644 index 0000000..80b4050 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_sigloss.py @@ -0,0 +1,18 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.sig_loss import SigLoss +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_sigloss_1(): + task = SigLoss() + task.in_file = File.sample(seed=0) + task.mask_file = File.sample(seed=2) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_slice.py b/pydra/tasks/fsl/v6/utils/tests/test_slice.py new file mode 100644 index 0000000..1409ba6 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_slice.py @@ -0,0 +1,26 @@ +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.slice import Slice +import pytest + + +logger = 
logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_slice_1(): + task = Slice() + task.in_file = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_slice_2(): + task = Slice() + task.in_file = Nifti1.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_slicer.py b/pydra/tasks/fsl/v6/utils/tests/test_slicer.py new file mode 100644 index 0000000..1abccee --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_slicer.py @@ -0,0 +1,21 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.slicer import Slicer +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_slicer_1(): + task = Slicer() + task.in_file = File.sample(seed=0) + task.image_edges = File.sample(seed=1) + task.label_slices = True + task.colour_map = File.sample(seed=3) + task.show_orientation = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_smooth.py b/pydra/tasks/fsl/v6/utils/tests/test_smooth.py new file mode 100644 index 0000000..6b5aa58 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_smooth.py @@ -0,0 +1,43 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.smooth import Smooth +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_smooth_1(): + task = Smooth() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_smooth_2(): + task = Smooth() + task.sigma = 8.0 + 
print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_smooth_3(): + task = Smooth() + task.fwhm = 8.0 + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_smooth_4(): + task = Smooth() + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_split.py b/pydra/tasks/fsl/v6/utils/tests/test_split.py new file mode 100644 index 0000000..3b6fad3 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_split.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.split import Split +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_split_1(): + task = Split() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_swapdimensions.py b/pydra/tasks/fsl/v6/utils/tests/test_swapdimensions.py new file mode 100644 index 0000000..e89756f --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_swapdimensions.py @@ -0,0 +1,17 @@ +from fileformats.generic import File +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.swap_dimensions import SwapDimensions +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_swapdimensions_1(): + task = SwapDimensions() + task.in_file = File.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_text2vest.py b/pydra/tasks/fsl/v6/utils/tests/test_text2vest.py new file mode 100644 index 
0000000..6dd5076 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_text2vest.py @@ -0,0 +1,26 @@ +from fileformats.text import TextFile +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.text_2_vest import Text2Vest +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_text2vest_1(): + task = Text2Vest() + task.in_file = TextFile.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_text2vest_2(): + task = Text2Vest() + task.in_file = TextFile.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_vest2text.py b/pydra/tasks/fsl/v6/utils/tests/test_vest2text.py new file mode 100644 index 0000000..e1e5a71 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_vest2text.py @@ -0,0 +1,27 @@ +from fileformats.datascience import TextMatrix +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.vest_2_text import Vest2Text +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_vest2text_1(): + task = Vest2Text() + task.in_file = TextMatrix.sample(seed=0) + task.out_file = "design.txt" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_vest2text_2(): + task = Vest2Text() + task.in_file = TextMatrix.sample(seed=0) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_warppoints.py b/pydra/tasks/fsl/v6/utils/tests/test_warppoints.py new file mode 100644 index 0000000..de028c0 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_warppoints.py @@ -0,0 +1,34 @@ +from fileformats.generic 
import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.warp_points import WarpPoints +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_warppoints_1(): + task = WarpPoints() + task.src_file = File.sample(seed=0) + task.dest_file = Nifti1.sample(seed=1) + task.in_coords = TextFile.sample(seed=2) + task.xfm_file = File.sample(seed=3) + task.warp_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_warppoints_2(): + task = WarpPoints() + task.dest_file = Nifti1.sample(seed=1) + task.in_coords = TextFile.sample(seed=2) + task.coord_mm = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_warppointsfromstd.py b/pydra/tasks/fsl/v6/utils/tests/test_warppointsfromstd.py new file mode 100644 index 0000000..f53ea9d --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_warppointsfromstd.py @@ -0,0 +1,34 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.warp_points_from_std import WarpPointsFromStd +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_warppointsfromstd_1(): + task = WarpPointsFromStd() + task.img_file = File.sample(seed=0) + task.std_file = Nifti1.sample(seed=1) + task.in_coords = TextFile.sample(seed=2) + task.xfm_file = File.sample(seed=3) + task.warp_file = File.sample(seed=4) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_warppointsfromstd_2(): + task 
= WarpPointsFromStd() + task.std_file = Nifti1.sample(seed=1) + task.in_coords = TextFile.sample(seed=2) + task.coord_mm = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_warppointstostd.py b/pydra/tasks/fsl/v6/utils/tests/test_warppointstostd.py new file mode 100644 index 0000000..bde7ec0 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_warppointstostd.py @@ -0,0 +1,35 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.warp_points_to_std import WarpPointsToStd +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_warppointstostd_1(): + task = WarpPointsToStd() + task.img_file = File.sample(seed=0) + task.std_file = Nifti1.sample(seed=1) + task.premat_file = File.sample(seed=2) + task.in_coords = TextFile.sample(seed=3) + task.xfm_file = File.sample(seed=4) + task.warp_file = File.sample(seed=5) + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_warppointstostd_2(): + task = WarpPointsToStd() + task.std_file = Nifti1.sample(seed=1) + task.in_coords = TextFile.sample(seed=3) + task.coord_mm = True + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/tests/test_warputils.py b/pydra/tasks/fsl/v6/utils/tests/test_warputils.py new file mode 100644 index 0000000..2bfc4a4 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/tests/test_warputils.py @@ -0,0 +1,30 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +from nipype2pydra.testing import PassAfterTimeoutWorker +from pydra.tasks.fsl.v6.utils.warp_utils import WarpUtils 
+import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.mark.xfail +def test_warputils_1(): + task = WarpUtils() + task.in_file = Nifti1.sample(seed=0) + task.reference = File.sample(seed=1) + task.write_jacobian = False + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) + + +@pytest.mark.xfail +def test_warputils_2(): + task = WarpUtils() + task.in_file = Nifti1.sample(seed=0) + task.out_format = "spline" + print(f"CMDLINE: {task.cmdline}\n\n") + res = task(worker=PassAfterTimeoutWorker) + print("RESULT: ", res) diff --git a/pydra/tasks/fsl/v6/utils/text_2_vest.py b/pydra/tasks/fsl/v6/utils/text_2_vest.py new file mode 100644 index 0000000..9268114 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/text_2_vest.py @@ -0,0 +1,51 @@ +from fileformats.generic import File +from fileformats.text import TextFile +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def out_file_callable(output_dir, inputs, stdout, stderr): + raise NotImplementedError + + +@shell.define +class Text2Vest(shell.Task["Text2Vest.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.text import TextFile + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.text_2_vest import Text2Vest + + >>> task = Text2Vest() + >>> task.in_file = TextFile.mock("design.txt") + >>> task.cmdline + 'None' + + + """ + + executable = "Text2Vest" + in_file: TextFile = shell.arg( + help="plain text file representing your design, contrast, or f-test matrix", + argstr="{in_file}", + position=1, + ) + out_file: Path = shell.arg( + help="file name to store matrix data in the format used by FSL tools (e.g., design.mat, design.con design.fts)", + argstr="{out_file}", + position=2, + ) + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="matrix data in the format used by FSL 
tools", + callable=out_file_callable, + ) diff --git a/pydra/tasks/fsl/v6/utils/vest_2_text.py b/pydra/tasks/fsl/v6/utils/vest_2_text.py new file mode 100644 index 0000000..504b70e --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/vest_2_text.py @@ -0,0 +1,51 @@ +from fileformats.datascience import TextMatrix +from fileformats.generic import File +import logging +from pathlib import Path +from pathlib import Path +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def out_file_callable(output_dir, inputs, stdout, stderr): + raise NotImplementedError + + +@shell.define +class Vest2Text(shell.Task["Vest2Text.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.datascience import TextMatrix + >>> from fileformats.generic import File + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.vest_2_text import Vest2Text + + >>> task = Vest2Text() + >>> task.in_file = TextMatrix.mock("design.mat") + >>> task.cmdline + 'None' + + + """ + + executable = "Vest2Text" + in_file: TextMatrix = shell.arg( + help="matrix data stored in the format used by FSL tools", + argstr="{in_file}", + position=1, + ) + out_file: Path = shell.arg( + help="file name to store text output from matrix", + argstr="{out_file}", + position=2, + default="design.txt", + ) + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="plain text representation of FSL matrix", callable=out_file_callable + ) diff --git a/pydra/tasks/fsl/v6/utils/warp_points.py b/pydra/tasks/fsl/v6/utils/warp_points.py new file mode 100644 index 0000000..a5f31a1 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/warp_points.py @@ -0,0 +1,108 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +import os +import os.path as op +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import tempfile + + +logger = logging.getLogger(__name__) + + +def 
_format_arg(name, value, inputs, argstr):
+    parsed_inputs = _parse_inputs(inputs) if inputs else {}
+    if value is None:
+        return ""
+
+    if name == "out_file":
+        return ""
+
+    return argstr.format(**inputs)
+
+
+def out_file_formatter(field, inputs):
+    return _format_arg("out_file", field, inputs, argstr="")
+
+
+def _parse_inputs(inputs, output_dir=None):
+    if not output_dir:
+        output_dir = os.getcwd()
+    parsed_inputs = {}
+    skip = []
+
+    fname, ext = op.splitext(inputs["in_coords"])
+    parsed_inputs["_in_file"] = fname
+    parsed_inputs["_outformat"] = ext[1:]
+    first_args = {}
+
+    second_args = fname + ".txt"
+
+    if ext in [".vtk", ".trk"]:
+        if parsed_inputs.get("_tmpfile") is None:
+            parsed_inputs["_tmpfile"] = tempfile.NamedTemporaryFile(
+                suffix=".txt", dir=os.getcwd(), delete=False
+            ).name
+        second_args = parsed_inputs["_tmpfile"]
+    return parsed_inputs
+
+
+@shell.define(xor=[["coord_mm", "coord_vox"], ["warp_file", "xfm_file"]])
+class WarpPoints(shell.Task["WarpPoints.Outputs"]):
+    """
+    Examples
+    -------
+
+    >>> from fileformats.generic import File
+    >>> from fileformats.medimage import Nifti1
+    >>> from fileformats.text import TextFile
+    >>> from pathlib import Path
+    >>> from pydra.tasks.fsl.v6.utils.warp_points import WarpPoints
+
+    >>> task = WarpPoints()
+    >>> task.src_file = File.mock()
+    >>> task.dest_file = Nifti1.mock("T1.nii")
+    >>> task.in_coords = TextFile.mock("surf.txt")
+    >>> task.xfm_file = File.mock()
+    >>> task.warp_file = File.mock()
+    >>> task.coord_mm = True
+    >>> task.cmdline
+    'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt'
+
+
+    """
+
+    executable = "img2imgcoord"
+    src_file: File = shell.arg(
+        help="filename of source image", argstr="-src {src_file}"
+    )
+    dest_file: Nifti1 = shell.arg(
+        help="filename of destination image", argstr="-dest {dest_file}"
+    )
+    in_coords: TextFile = shell.arg(
+        help="filename of file containing coordinates",
+        argstr="{in_coords}",
+        position=-1,
+    )
+    xfm_file: File | None = 
shell.arg( + help="filename of affine transform (e.g. source2dest.mat)", + argstr="-xfm {xfm_file}", + ) + warp_file: File | None = shell.arg( + help="filename of warpfield (e.g. intermediate2dest_warp.nii.gz)", + argstr="-warp {warp_file}", + ) + coord_vox: bool = shell.arg( + help="all coordinates in voxels - default", argstr="-vox" + ) + coord_mm: bool = shell.arg(help="all coordinates in mm", argstr="-mm") + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output file name", + path_template="{in_coords}_warped", + formatter="out_file_formatter", + ) diff --git a/pydra/tasks/fsl/v6/utils/warp_points_from_std.py b/pydra/tasks/fsl/v6/utils/warp_points_from_std.py new file mode 100644 index 0000000..099219a --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/warp_points_from_std.py @@ -0,0 +1,81 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +import os.path as op +from pydra.compose import shell + + +logger = logging.getLogger(__name__) + + +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + inputs = attrs.asdict(inputs) + + outputs = {} + outputs["out_file"] = op.abspath("stdout.nipype") + return outputs + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs.get("out_file") + + +@shell.define(xor=[["coord_mm", "coord_vox"], ["warp_file", "xfm_file"]]) +class WarpPointsFromStd(shell.Task["WarpPointsFromStd.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from fileformats.text import TextFile + >>> from pydra.tasks.fsl.v6.utils.warp_points_from_std import WarpPointsFromStd + + >>> task = WarpPointsFromStd() + >>> task.img_file = File.mock() + >>> task.std_file = Nifti1.mock("mni.nii") + >>> task.in_coords = 
TextFile.mock("surf.txt") + >>> task.xfm_file = File.mock() + >>> task.warp_file = File.mock() + >>> task.coord_mm = True + >>> task.cmdline + 'std2imgcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' + + + """ + + executable = "std2imgcoord" + img_file: File = shell.arg( + help="filename of a destination image", argstr="-img {img_file}" + ) + std_file: Nifti1 = shell.arg( + help="filename of the image in standard space", argstr="-std {std_file}" + ) + in_coords: TextFile = shell.arg( + help="filename of file containing coordinates", + argstr="{in_coords}", + position=-2, + ) + xfm_file: File | None = shell.arg( + help="filename of affine transform (e.g. source2dest.mat)", + argstr="-xfm {xfm_file}", + ) + warp_file: File | None = shell.arg( + help="filename of warpfield (e.g. intermediate2dest_warp.nii.gz)", + argstr="-warp {warp_file}", + ) + coord_vox: bool = shell.arg( + help="all coordinates in voxels - default", argstr="-vox" + ) + coord_mm: bool = shell.arg(help="all coordinates in mm", argstr="-mm") + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="Name of output file, containing the warp as field or coefficients.", + callable=out_file_callable, + ) diff --git a/pydra/tasks/fsl/v6/utils/warp_points_to_std.py b/pydra/tasks/fsl/v6/utils/warp_points_to_std.py new file mode 100644 index 0000000..ba934f0 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/warp_points_to_std.py @@ -0,0 +1,111 @@ +from fileformats.generic import File +from fileformats.medimage import Nifti1 +from fileformats.text import TextFile +import logging +import os +import os.path as op +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import tempfile + + +logger = logging.getLogger(__name__) + + +def _format_arg(name, value, inputs, argstr): + parsed_inputs = _parse_inputs(inputs) if inputs else {} + if value is None: + return "" + + if name == "out_file": + return "" + + return argstr.format(**inputs) + + +def 
out_file_formatter(field, inputs): + return _format_arg("out_file", field, inputs, argstr="") + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + + fname, ext = op.splitext(inputs["in_coords"]) + parsed_inputs["_in_file"] = fname + parsed_inputs["_outformat"] = ext[1:] + first_args = {} + + second_args = fname + ".txt" + + if ext in [".vtk", ".trk"]: + if parsed_inputs["_tmpfile"] is None: + parsed_inputs["_tmpfile"] = tempfile.NamedTemporaryFile( + suffix=".txt", dir=os.getcwd(), delete=False + ).name + second_args = parsed_inputs["_tmpfile"] + return parsed_inputs + + +@shell.define(xor=[["coord_mm", "coord_vox"], ["warp_file", "xfm_file"]]) +class WarpPointsToStd(shell.Task["WarpPointsToStd.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from fileformats.text import TextFile + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.warp_points_to_std import WarpPointsToStd + + >>> task = WarpPointsToStd() + >>> task.img_file = File.mock() + >>> task.std_file = Nifti1.mock("mni.nii") + >>> task.premat_file = File.mock() + >>> task.in_coords = TextFile.mock("surf.txt") + >>> task.xfm_file = File.mock() + >>> task.warp_file = File.mock() + >>> task.coord_mm = True + >>> task.cmdline + 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' + + + """ + + executable = "img2stdcoord" + img_file: File = shell.arg(help="filename of input image", argstr="-img {img_file}") + std_file: Nifti1 = shell.arg( + help="filename of destination image", argstr="-std {std_file}" + ) + premat_file: File = shell.arg( + help="filename of pre-warp affine transform (e.g. 
example_func2highres.mat)", + argstr="-premat {premat_file}", + ) + in_coords: TextFile = shell.arg( + help="filename of file containing coordinates", + argstr="{in_coords}", + position=-1, + ) + xfm_file: File | None = shell.arg( + help="filename of affine transform (e.g. source2dest.mat)", + argstr="-xfm {xfm_file}", + ) + warp_file: File | None = shell.arg( + help="filename of warpfield (e.g. intermediate2dest_warp.nii.gz)", + argstr="-warp {warp_file}", + ) + coord_vox: bool = shell.arg( + help="all coordinates in voxels - default", argstr="-vox" + ) + coord_mm: bool = shell.arg(help="all coordinates in mm", argstr="-mm") + + class Outputs(shell.Outputs): + out_file: Path = shell.outarg( + help="output file name", + path_template="{in_coords}_warped", + formatter="out_file_formatter", + ) diff --git a/pydra/tasks/fsl/v6/utils/warp_utils.py b/pydra/tasks/fsl/v6/utils/warp_utils.py new file mode 100644 index 0000000..961c4d4 --- /dev/null +++ b/pydra/tasks/fsl/v6/utils/warp_utils.py @@ -0,0 +1,122 @@ +import attrs +from fileformats.generic import File +from fileformats.medimage import Nifti1 +import logging +import os +from pathlib import Path +from pathlib import Path +from pydra.compose import shell +import typing as ty + + +logger = logging.getLogger(__name__) + + +def _parse_inputs(inputs, output_dir=None): + if not output_dir: + output_dir = os.getcwd() + parsed_inputs = {} + skip = [] + self_dict = {} + + if skip is None: + skip = [] + + suffix = "field" + if (inputs["out_format"] is not attrs.NOTHING) and inputs["out_format"] == "spline": + suffix = "coeffs" + + trait_spec = self_dict["inputs"].trait("out_file") + trait_spec.name_template = "%s_" + suffix + + if inputs["write_jacobian"]: + if inputs["out_jacobian"] is attrs.NOTHING: + jac_spec = self_dict["inputs"].trait("out_jacobian") + jac_spec.name_source = ["in_file"] + jac_spec.name_template = "%s_jac" + jac_spec.output_name = "out_jacobian" + else: + skip += ["out_jacobian"] + + skip += 
["write_jacobian"] + + return parsed_inputs + + +def out_file_callable(output_dir, inputs, stdout, stderr): + return inputs.out_file + + +def out_jacobian_callable(output_dir, inputs, stdout, stderr): + parsed_inputs = _parse_inputs(inputs) + return parsed_inputs.get("out_jacobian", attrs.NOTHING) + + +@shell.define +class WarpUtils(shell.Task["WarpUtils.Outputs"]): + """ + Examples + ------- + + >>> from fileformats.generic import File + >>> from fileformats.medimage import Nifti1 + >>> from pathlib import Path + >>> from pydra.tasks.fsl.v6.utils.warp_utils import WarpUtils + + >>> task = WarpUtils() + >>> task.in_file = Nifti1.mock("warpfield.nii") + >>> task.reference = File.mock() + >>> task.out_format = "spline" + >>> task.cmdline + 'fnirtfileutils --in=warpfield.nii --outformat=spline --ref=T1.nii --warpres=10.0000,10.0000,10.0000 --out=warpfield_coeffs.nii.gz' + + + """ + + executable = "fnirtfileutils" + in_file: Nifti1 = shell.arg( + help="Name of file containing warp-coefficients/fields. This would typically be the output from the --cout switch of fnirt (but can also use fields, like the output from --fout).", + argstr="--in={in_file}", + ) + reference: File = shell.arg( + help="Name of a file in target space. Note that the target space is now different from the target space that was used to create the --warp file. It would typically be the file that was specified with the --in argument when running fnirt.", + argstr="--ref={reference}", + ) + out_format: ty.Any = shell.arg( + help="Specifies the output format. If set to field (default) the output will be a (4D) field-file. If set to spline the format will be a (4D) file of spline coefficients.", + argstr="--outformat={out_format}", + ) + warp_resolution: ty.Any = shell.arg( + help="Specifies the resolution/knot-spacing of the splines pertaining to the coefficients in the --out file. This parameter is only relevant if --outformat is set to spline. 
It should be noted that if the --in file has a higher resolution, the resulting coefficients will pertain to the closest (in a least-squares sense) file in the space of fields with the --warpres resolution. It should also be noted that the resolution will always be an integer multiple of the voxel size.", + argstr="--warpres={warp_resolution[0]:0.4},{warp_resolution[1]:0.4},{warp_resolution[2]:0.4}", + ) + knot_space: ty.Any = shell.arg( + help="Alternative (to --warpres) specification of the resolution of the output spline-field.", + argstr="--knotspace={knot_space[0]},{knot_space[1]},{knot_space[2]}", + ) + out_file: Path = shell.arg( + help="Name of output file. The format of the output depends on what other parameters are set. The default format is a (4D) field-file. If the --outformat is set to spline the format will be a (4D) file of spline coefficients.", + argstr="--out={out_file}", + position=-1, + ) + write_jacobian: bool | None = shell.arg( + help="Switch on --jac flag with automatically generated filename", default=False + ) + out_jacobian: Path = shell.arg( + help="Specifies that a (3D) file of Jacobian determinants corresponding to --in should be produced and written to filename.", + argstr="--jac={out_jacobian}", + ) + with_affine: bool = shell.arg( + help="Specifies that the affine transform (i.e. that which was specified for the --aff parameter in fnirt) should be included as displacements in the --out file. 
That can be useful for interfacing with software that cannot decode FSL/fnirt coefficient-files (where the affine transform is stored separately from the displacements).", + argstr="--withaff", + ) + + class Outputs(shell.Outputs): + out_file: File | None = shell.out( + help="Name of output file, containing the warp as field or coefficients.", + callable=out_file_callable, + ) + out_jacobian: File | None = shell.out( + help="Name of output file, containing the map of the determinant of the Jacobian", + callable=out_jacobian_callable, + ) diff --git a/pydra/tasks/fsl/v6_0/__init__.py b/pydra/tasks/fsl/v6_0/__init__.py deleted file mode 100644 index 8d155ce..0000000 --- a/pydra/tasks/fsl/v6_0/__init__.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -This is a basic doctest demonstrating that the package and pydra can both be successfully -imported. - -FSL interfaces are available within the `pydra.tasks.fsl` package. - ->>> from pydra.tasks import fsl - -.. automodule:: pydra.tasks.fsl.bet -.. automodule:: pydra.tasks.fsl.eddy -.. automodule:: pydra.tasks.fsl.fast -.. automodule:: pydra.tasks.fsl.flirt -.. automodule:: pydra.tasks.fsl.fnirt -.. automodule:: pydra.tasks.fsl.fslmaths -.. automodule:: pydra.tasks.fsl.fugue -.. automodule:: pydra.tasks.fsl.susan -.. automodule:: pydra.tasks.fsl.utils -""" - -from . import maths -from .bet import BET, RobustFOV -from .eddy import ApplyTopup, Eddy, Topup -from .fast import FAST -from .flirt import ( - FLIRT, - ApplyXFM, - ConcatXFM, - ConvertXFM, - FixScaleSkew, - Img2ImgCoord, - Img2StdCoord, - InvertXFM, - Std2ImgCoord, -) -from .fnirt import FNIRT, ApplyWarp, ConvertWarp, FNIRTFileUtils, InvWarp -from .fugue import FUGUE, Prelude, PrepareFieldmap, SigLoss -from .susan import SUSAN -from .utils import ( - FFT, - ROI, - ChFileType, - Info, - Interleave, - Merge, - Orient, - Reorient2Std, - SelectVols, - Slice, - SmoothFill, - Split, - SwapDim, -) - -# TODO: Drop compatibility aliases when 0.x is released. 
-FSLFFT = FFT -FSLROI = ROI -FSLChFileType = ChFileType -FSLInfo = Info -FSLInterleave = Interleave -FSLMerge = Merge -FSLOrient = Orient -FSLPrepareFieldmap = PrepareFieldmap -FSLReorient2Std = Reorient2Std -FSLSelectVols = SelectVols -FSLSlice = Slice -FSLSmoothFill = SmoothFill -FSLSplit = Split -FSLSwapDim = SwapDim -fslmaths = maths diff --git a/pydra/tasks/fsl/v6_0/bet/__init__.py b/pydra/tasks/fsl/v6_0/bet/__init__.py deleted file mode 100644 index 9762648..0000000 --- a/pydra/tasks/fsl/v6_0/bet/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -BET -=== - -.. automodule:: pydra.tasks.fsl.bet.bet -.. automodule:: pydra.tasks.fsl.bet.robustfov -""" - -from .bet import BET -from .robustfov import RobustFOV diff --git a/pydra/tasks/fsl/v6_0/bet/bet.py b/pydra/tasks/fsl/v6_0/bet/bet.py deleted file mode 100644 index 1247886..0000000 --- a/pydra/tasks/fsl/v6_0/bet/bet.py +++ /dev/null @@ -1,207 +0,0 @@ -""" -Brain Extraction Tool (BET) -=========================== - -BET removes non-brain tissues from whole-head images. -It can also estimate the inner and outer skull surfaces, and outer scalp surface, -when provided with good quality T1 and T2 input images. 
-""" - -__all__ = ["BET"] - -import os -import typing as ty - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class BETSpec(pydra.specs.ShellSpec): - """Specifications for BET.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output image", - "argstr": "", - "output_file_template": "{input_image}_bet", - } - ) - - save_brain_surface_outline: bool = attrs.field( - metadata={"help_string": "save brain surface outline", "argstr": "-o"} - ) - - save_brain_mask: bool = attrs.field( - metadata={"help_string": "save binary brain mask", "argstr": "-m"} - ) - - save_skull_image: bool = attrs.field( - metadata={"help_string": "save approximate skull image", "argstr": "-s"} - ) - - save_brain_surface_mesh: bool = attrs.field( - metadata={ - "help_string": "save brain surface as mesh in .vtk format", - "argstr": "-e", - } - ) - - fractional_intensity_threshold: float = attrs.field( - metadata={ - "help_string": ( - "Fractional intensity threshold (between 0 and 1). Default is 0.5. " - "Smaller values give larger brain outline estimates." - ), - "argstr": "-f", - } - ) - - vertical_gradient: float = attrs.field( - metadata={ - "help_string": ( - "Vertical gradient in fractional intensity threshold (between -1 and 1)." - " Default is 0. Positive values give larger brain outlines." - ), - "argstr": "-g", - } - ) - - head_radius: float = attrs.field( - metadata={ - "help_string": "Head radius (in millimeters)." 
- " Initial surface sphere is set to half of this value.", - "argstr": "-r", - } - ) - - center_of_gravity: ty.Tuple[int, int, int] = attrs.field( - metadata={ - "help_string": "centre-of-gravity (in voxel coordinates) of initial mesh surface", - "argstr": "-c", - } - ) - - apply_thresholding: bool = attrs.field( - metadata={ - "help_string": "apply thresholding to segmented brain image and mask", - "argstr": "-t", - } - ) - - verbose: bool = attrs.field( - metadata={ - "help_string": "enable verbose logging", - "argstr": "-v", - } - ) - - -@attrs.define(slots=False, kw_only=True) -class BETVariationsSpec(pydra.specs.ShellSpec): - """Specifications for BET variations.""" - - _xor = { - "with_robust_brain_center_estimation", - "with_eye_and_optic_nerve_cleanup", - "with_bias_field_and_neck_cleanup", - "with_small_fov_in_z", - "with_4d_fmri_data", - } - - with_robust_brain_center_estimation: bool = attrs.field( - metadata={ - "help_string": "iterate BET several times to improve robustness", - "argstr": "-R", - "xor": _xor, - } - ) - - with_eye_and_optic_nerve_cleanup: bool = attrs.field( - metadata={ - "help_string": "remove eye and optic nerve", - "argstr": "-S", - "xor": _xor | {"save_brain_surface_outline"}, - } - ) - - with_bias_field_and_neck_cleanup: bool = attrs.field( - metadata={ - "help_string": "remove bias field and neck", - "argstr": "-B", - "xor": _xor, - } - ) - - with_small_fov_in_z: bool = attrs.field( - metadata={ - "help_string": "improve BET for very small FOV in Z", - "argstr": "-Z", - "xor": _xor, - } - ) - - with_4d_fmri_data: bool = attrs.field( - metadata={ - "help_string": "apply BET to 4D FMRI data", - "argstr": "-F", - "xor": _xor | {"fractional_intensity_threshold"}, - } - ) - - -@attrs.define(slots=False, kw_only=True) -class BETOutSpec(pydra.specs.ShellOutSpec): - """Output specifications for BET.""" - - brain_surface_outline: pydra.specs.File = attrs.field( - metadata={ - "help_string": "brain surface outline", - 
"output_file_template": "{output_image}_overlay", - "requires": ["save_brain_surface_outline"], - } - ) - - brain_mask: pydra.specs.File = attrs.field( - metadata={ - "help_string": "brain mask", - "output_file_template": "{output_image}_mask", - "requires": ["save_brain_mask"], - } - ) - - skull_image: pydra.specs.File = attrs.field( - metadata={ - "help_string": "skull image", - "output_file_template": "{output_image}_skull", - "requires": ["save_skull_image"], - } - ) - - brain_surface_mesh: pydra.specs.File = attrs.field( - metadata={ - "help_string": "brain surface mesh", - "output_file_template": "{output_image}_mesh.vtk", - "keep_extension": False, - "requires": ["save_brain_surface_mesh"], - } - ) - - -class BET(pydra.engine.ShellCommandTask): - """Task definition for BET.""" - - executable = "bet" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(BETSpec, BETVariationsSpec)) - - output_spec = pydra.specs.SpecInfo(name="Output", bases=(BETOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/bet/robustfov.py b/pydra/tasks/fsl/v6_0/bet/robustfov.py deleted file mode 100644 index 7dbaf6c..0000000 --- a/pydra/tasks/fsl/v6_0/bet/robustfov.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -RobustFOV -========= - -Automatic FOV reduction to remove the neck and lower part of the head -from structural brain images. 
- -Examples --------- - ->>> task = RobustFOV(input_image="image.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'robustfov -i image.nii -r ...image_rfov.nii -b 170 -m ...image_rfov.mat' -""" - -__all__ = ["RobustFOV"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class RobustFOVSpec(pydra.specs.ShellSpec): - """Specifications for robustfov.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "-i", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output image with reduced FOV", - "argstr": "-r", - "output_file_template": "{input_image}_rfov", - } - ) - - brain_size: int = attrs.field( - default=170, - metadata={ - "help_string": "size of the brain in z-axis", - "argstr": "-b", - }, - ) - - output_matrix: str = attrs.field( - metadata={ - "help_string": "output transformation matrix", - "argstr": "-m", - "output_file_template": "{input_image}_rfov.mat", - "keep_extension": False, - } - ) - - -class RobustFOV(pydra.engine.ShellCommandTask): - """Task definition for robustfov.""" - - executable = "robustfov" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(RobustFOVSpec,)) diff --git a/pydra/tasks/fsl/v6_0/eddy/__init__.py b/pydra/tasks/fsl/v6_0/eddy/__init__.py deleted file mode 100644 index 60dd623..0000000 --- a/pydra/tasks/fsl/v6_0/eddy/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -""" -Eddy -==== - -.. automodule:: pydra.tasks.fsl.eddy.eddy -.. automodule:: pydra.tasks.fsl.eddy.topup -.. 
automodule:: pydra.tasks.fsl.eddy.applytopup -""" - -from .applytopup import ApplyTopup -from .eddy import Eddy -from .topup import Topup diff --git a/pydra/tasks/fsl/v6_0/eddy/applytopup.py b/pydra/tasks/fsl/v6_0/eddy/applytopup.py deleted file mode 100644 index bc974d7..0000000 --- a/pydra/tasks/fsl/v6_0/eddy/applytopup.py +++ /dev/null @@ -1,193 +0,0 @@ -""" -ApplyTopup -========== - -Examples --------- - ->>> task = ApplyTopup( -... input_image="blipup.nii", -... encoding_file="parameters.txt", -... input_index=1, -... fieldmap_image="fieldmap.nii", -... method="jac", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'applytopup --imain=blipup.nii --datain=parameters.txt --inindex=1 \ ---topup=fieldmap --out=blipup_topup.nii --method=jac ...' - ->>> task = ApplyTopup( -... input_image=["blipup.nii", "blipdown.nii"], -... encoding_file="parameters.txt", -... input_index=[1, 2, 3], -... field_coefficients_image="topup_fieldcoef.nii", -... movement_parameters_file="topup_movpar.txt", -... output_image="corrected.nii", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'applytopup --imain=blipup.nii,blipdown.nii --datain=parameters.txt \ ---inindex=1,2,3 --topup=topup --out=corrected.nii ...' 
-""" - -__all__ = ["ApplyTopup"] - -from os import PathLike -from pathlib import PurePath -from typing import Sequence, Union - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -def _to_input_image(field: Union[PathLike, Sequence[PathLike]]) -> str: - try: - paths = [PurePath(field)] - except TypeError: - paths = [PurePath(path) for path in field] - - return f"--imain={','.join(str(path) for path in paths)}" - - -def _to_input_index(field: Union[int, Sequence[int]]) -> str: - try: - indexes = list(field) - except TypeError: - indexes = [field] - - return f"--inindex={','.join(str(index) for index in indexes)}" - - -def _to_topup_basename( - fieldmap_image: PathLike, field_coefficients_image: PathLike -) -> str: - if field_coefficients_image: - path = PurePath(field_coefficients_image) - basename = path.parent / path.name.split("_fieldcoef", 1)[0] - else: - path = PurePath(fieldmap_image) - basename = path.parent / path.name.split(".", 1)[0] - - return f"--topup={str(basename)}" - - -def _to_output_image( - output_image: PathLike, - input_image: Union[PathLike, Sequence[PathLike]], -) -> str: - if output_image: - path = PurePath(output_image) - else: - try: - path = PurePath(input_image) - except TypeError: - path = PurePath(input_image[0]) - name, ext = path.name.split(".", 1) - path = path.with_name(f"{name}_topup.{ext}") - - return f"--out={path}" - - -@define(slots=False, kw_only=True) -class ApplyTopupSpec(ShellSpec): - """Specifications for applytopup.""" - - input_image: Union[PathLike, Sequence[PathLike]] = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "formatter": _to_input_image, - } - ) - - encoding_file: PathLike = field( - metadata={ - "help_string": "text file containing phase encoding directions and timings", - "mandatory": True, - "argstr": "--datain={encoding_file}", - } - ) - - input_index: Union[int, Sequence[int]] = field( - 
metadata={ - "help_string": "indices mapping each input image to a row of the encoding file", - "mandatory": True, - "formatter": _to_input_index, - } - ) - - topup_basename: str = field( - metadata={ - "help_string": "basename for fieldmap or topup output files", - "formatter": _to_topup_basename, - "readonly": True, - } - ) - - fieldmap_image: PathLike = field( - metadata={ - "help_string": "fieldmap image", - "mandatory": True, - "xor": {"field_coefficients_image"}, - } - ) - - field_coefficients_image: PathLike = field( - metadata={ - "help_string": "field coefficients image computed by topup", - "mandatory": True, - "xor": {"fieldmap_image"}, - "requires": {"movement_parameters_file"}, - } - ) - - movement_parameters_file: PathLike = field( - metadata={"help_string": "movement parameters file computed by topup"} - ) - - output_image: PathLike = field( - metadata={ - "help_string": "output image", - "argstr": "--out", - "formatter": _to_output_image, - } - ) - - method: str = field( - default="lsr", - metadata={ - "help_string": "resampling method", - "argstr": "--method={method}", - "allowed_values": {"jac", "lsr", "vb2D", "vb3D", "vb4D"}, - }, - ) - - interpolation: str = field( - default="spline", - metadata={ - "help_string": "interpolation model", - "argstr": "--interp={interpolation}", - "allowed_values": {"spline", "trilinear"}, - }, - ) - - datatype: str = field( - default="preserve", - metadata={ - "help_string": "force output datatype", - "argstr": "--datatype={datatype}", - "allowed_values": {"preserve", "char", "short", "int", "float", "double"}, - }, - ) - - verbose: bool = field( - metadata={"help_string": "enable verbose logging", "argstr": "--verbose"} - ) - - -class ApplyTopup(ShellCommandTask): - """Task definition for applytopup.""" - - executable = "applytopup" - - input_spec = SpecInfo(name="Input", bases=(ApplyTopupSpec,)) diff --git a/pydra/tasks/fsl/v6_0/eddy/eddy.py b/pydra/tasks/fsl/v6_0/eddy/eddy.py deleted file mode 100644 index 
38335d4..0000000 --- a/pydra/tasks/fsl/v6_0/eddy/eddy.py +++ /dev/null @@ -1,429 +0,0 @@ -""" -Eddy -==== - -Correct for artifacts induced by Eddy currents and subject motion. - -Examples --------- - ->>> task = Eddy( -... input_image="input.nii", -... brain_mask="brain.nii", -... encoding_file="params.txt", -... index_file="index.txt", -... bvec_file="input.bvec", -... bval_file="input.bval", -... fieldmap_image="fieldmap.nii", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'eddy --imain=input.nii --mask=brain.nii --acqp=params.txt --index=index.txt \ ---bvecs=input.bvec --bvals=input.bval --field=fieldmap.nii --out=eddy ...' -""" - -__all__ = ["Eddy"] - -from os import PathLike -from pathlib import PurePath - -from attrs import define, field -from pydra.engine.specs import File, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(slots=False, kw_only=True) -class EddySpec(ShellSpec): - """Specifications for eddy.""" - - # Parameters that specify input files. 
- input_image: PathLike = field( - metadata={ - "help_string": "input image as a 4D volume", - "mandatory": True, - "argstr": "--imain={input_image}", - } - ) - - brain_mask: PathLike = field( - metadata={ - "help_string": "brain mask as a single volume image", - "mandatory": True, - "argstr": "--mask={brain_mask}", - } - ) - - encoding_file: PathLike = field( - metadata={ - "help_string": "acquisition parameters for the diffusion protocol", - "mandatory": True, - "argstr": "--acqp={encoding_file}", - } - ) - - index_file: PathLike = field( - metadata={ - "help_string": "mapping from volume index to encoding parameters", - "mandatory": True, - "argstr": "--index={index_file}", - } - ) - - bvec_file: PathLike = field( - metadata={ - "help_string": "diffusion directions", - "mandatory": True, - "argstr": "--bvecs={bvec_file}", - } - ) - - bval_file: PathLike = field( - metadata={ - "help_string": "diffusion weighting", - "mandatory": True, - "argstr": "--bvals={bval_file}", - } - ) - - fieldmap_image: PathLike = field( - metadata={"help_string": "fieldmap image", "argstr": "--field={fieldmap_image}"} - ) - - fieldmap_matrix: PathLike = field( - metadata={ - "help_string": "rigid-body transformation matrix from fieldmap to first input volume", - "argstr": "--field_mat={fieldmap_matrix}", - "requires": {"fieldmap_image"}, - } - ) - - no_peas: bool = field( - metadata={ - "help_string": "do not perform post-Eddy alignment of shells", - "argstr": "--dont_peas", - } - ) - - # Parameters specifying names of output-files. - output_basename: str = field( - default="eddy", - metadata={ - "help_string": "basename for output files", - "argstr": "--out={output_basename}", - }, - ) - - # Parameters specifying how eddy should be run. 
- first_level_model: str = field( - default="quadratic", - metadata={ - "help_string": "model for the magnetic field generated by Eddy currents", - "argstr": "--flm={first_level_model}", - "allowed_values": {"movement", "linear", "quadratic", "cubic"}, - }, - ) - - second_level_model: str = field( - default="none", - metadata={ - "help_string": "model for how diffusion gradients generate Eddy currents", - "argstr": "--slm={second_level_model}", - "allowed_values": {"none", "linear", "quadratic"}, - }, - ) - - fwhm: float = field( - default=0, - metadata={ - "help_string": "filter width used for pre-conditioning data prior to estimating distortions", - "argstr": "--fwhm={fwhm}", - }, - ) - - num_iterations: int = field( - default=5, - metadata={ - "help_string": "number of iterations for eddy", - "argstr": "--niter={num_iterations}", - }, - ) - - fill_empty_planes: bool = field( - metadata={"help_string": "detect and fill empty planes", "argstr": "--fep"} - ) - - interpolation: str = field( - default="spline", - metadata={ - "help_string": "interpolation method for the estimation phase", - "argstr": "--interp={interpolation}", - "allowed_values": {"spline", "trilinear"}, - }, - ) - - resampling: str = field( - default="jac", - metadata={ - "help_string": "final resampling strategy", - "argstr": "--resamp={resampling}", - "allowed_values": {"jac", "lsr"}, - }, - ) - - num_voxels: int = field( - default=1000, - metadata={ - "help_string": "number of voxels to use for GP hyperparameter estimation", - "argstr": "--nvoxhp={num_voxels}", - }, - ) - - fudge_factor: int = field( - default=10, - metadata={ - "help_string": "fudge factor for Q-space smoothing during estimation", - "argstr": "--ff={fudge_factor}", - }, - ) - - # Parameters for outlier replacement (ol) - replace_outliers: bool = field( - metadata={"help_string": "replace outliers", "argstr": "--repol"} - ) - - outlier_num_stdevs: int = field( - metadata={ - "help_string": "number of times off the standard 
deviation to qualify as outlier", - "argstr": "--ol_nstd={outlier_num_stdevs}", - "requires": {"replace_outliers"}, - } - ) - - outlier_num_voxels: int = field( - metadata={ - "help_string": "minimum number of voxels in a slice to qualify for outlier detection", - "argstr": "--ol_nvox={outlier_num_voxels}", - "requires": {"replace_outliers"}, - } - ) - - outlier_type: str = field( - metadata={ - "help_string": "type of outliers detected", - "argstr": "--ol_type={outlier_type}", - "allowed_values": {"both", "gw", "sw"}, - "requires": {"replace_outliers"}, - } - ) - - multiband_factor: int = field( - metadata={ - "help_string": "multiband factor", - "argstr": "--mb={multiband_factor}", - } - ) - - multiband_offset: int = field( - metadata={ - "help_string": "multiband slice offset", - "argstr": "--mb_offs={multiband_offset}", - "requires": {"multiband_factor"}, - } - ) - - # Parameters for intra-volume movement correction (s2v) - movement_prediction_order: int = field( - default=0, - metadata={ - "help_string": "order of movement prediction model", - "argstr": "--mporder={movement_prediction_order}", - }, - ) - - s2v_num_iterations: int = field( - metadata={ - "help_string": "number of iterations for s2v movement estimation", - "argstr": "--s2v_niter={s2v_num_iterations}", - } - ) - - s2v_lambda: float = field( - metadata={ - "help_string": "weighting of regularization for s2v movement estimation", - "argstr": "--s2v_lambda={s2v_lambda}", - } - ) - - s2v_interpolation: str = field( - metadata={ - "help_string": "interpolation method for s2v movement estimation.", - "argstr": "--s2v_interp={s2v_interpolation}", - "allowed_values": {"spline", "trilinear"}, - } - ) - - slice_grouping_file: PathLike = field( - metadata={ - "help_string": "file containing slice grouping information", - "argstr": "--slspec={slice_grouping_file}", - "xor": {"slice_timing_file"}, - } - ) - - slice_timing_file: PathLike = field( - metadata={ - "help_string": "file containing slice timing 
information", - "argstr": "--json={slice_timing_file}", - "xor": {"slice_grouping_file"}, - } - ) - - # Parameters for move-by-susceptibility correction (mbs) - estimate_move_by_susceptibility: bool = field( - metadata={ - "help_string": "estimate susceptibility-induced field changes due to subject motion", - "argstr": "--estimate_move_by_susceptibility", - } - ) - - mbs_num_iterations: int = field( - metadata={ - "help_string": "number of iterations for MBS field estimation", - "argstr": "--mbs_niter={mbs_num_iterations}", - "requires": {"estimate_move_by_susceptibility"}, - } - ) - - mbs_lambda: int = field( - metadata={ - "help_string": "weighting of regularization for MBS field estimation", - "argstr": "--mbs_lambda={mbs_lambda}", - "requires": {"estimate_move_by_susceptibility"}, - } - ) - - mbs_knot_spacing: int = field( - metadata={ - "help_string": "knot-spacing for MBS field estimation", - "argstr": "--mbs_ksp={mbs_knot_spacing}", - "requires": {"estimate_move_by_susceptibility"}, - } - ) - - # Miscellaneous parameters. 
- data_is_shelled: bool = field( - metadata={ - "help_string": "bypass checks for data shelling", - "argstr": "--data_is_shelled", - } - ) - - random_seed: int = field( - metadata={ - "help_string": "random seed for voxel selection", - "argstr": "--initrand={random_seed}", - } - ) - - save_cnr_maps: bool = field( - metadata={"help_string": "save shell-wise CNR maps", "argstr": "--cnr_maps"} - ) - - save_residuals: bool = field( - metadata={ - "help_string": "save residuals for all scans", - "argstr": "--residuals", - } - ) - - verbose: bool = field( - metadata={"help_string": "enable verbose logging", "argstr": "--verbose"} - ) - - -@define(slots=False, kw_only=True) -class EddyOutSpec(ShellOutSpec): - """Output specification for eddy.""" - - corrected_image: File = field( - metadata={ - "help_string": "input image corrected for distortions", - "output_file_template": "{output_basename}.nii.gz", - } - ) - - parameters_file: File = field( - metadata={ - "help_string": "registration parameters for movement and EC", - "output_file_template": "{output_basename}.eddy_parameters", - } - ) - - rotated_bvec_file: File = field( - metadata={ - "help_string": "rotated b-vecs", - "output_file_template": "{output_basename}.eddy_rotated_bvecs", - } - ) - - movement_rms_matrix: File = field( - metadata={ - "help_string": "movement induced RMS", - "output_file_template": "{output_basename}.eddy_movement_rms", - } - ) - - restricted_movement_rms_matrix: File = field( - metadata={ - "help_string": "movement induced RMS without translation in the PE direction", - "output_file_template": "{output_basename}.eddy_restricted_movement_rms", - } - ) - - displacement_fields_image: File = field( - metadata={ - "help_string": "displacement fields in millimeters", - "output_file_template": "{output_basename}.eddy_displacement_fields", - } - ) - - outlier_free_image: File = field( - metadata={ - "help_string": "input image with outliers replaced by predictions", - "output_file_template": 
"{output_basename}.eddy_outlier_free_data", - "requires": ["replace_outliers"], - } - ) - - movement_over_time_file: File = field( - metadata={ - "help_string": "movement parameters per time-point (slice or group)", - "output_file_template": "{output_basename}.eddy_movement_over_time", - "requires": ["movement_prediction_order"], - } - ) - - cnr_maps_image: File = field( - metadata={ - "help_string": "path to optional CNR maps image", - "output_file_template": "{output_basename}.eddy_cnr_maps", - "requires": ["save_cnr_maps"], - } - ) - - residuals_image: File = field( - metadata={ - "help_string": "path to optional residuals image", - "output_file_template": "{output_basename}.eddy_residuals", - "requires": ["save_residuals"], - } - ) - - -class Eddy(ShellCommandTask): - """Task definition for eddy.""" - - executable = "eddy" - - input_spec = SpecInfo(name="Input", bases=(EddySpec,)) - - output_spec = SpecInfo(name="Output", bases=(EddyOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/eddy/topup.py b/pydra/tasks/fsl/v6_0/eddy/topup.py deleted file mode 100644 index 54f51f7..0000000 --- a/pydra/tasks/fsl/v6_0/eddy/topup.py +++ /dev/null @@ -1,249 +0,0 @@ -""" -Topup -===== - -Examples --------- - -Minimal call to `topup`: - ->>> task = Topup(input_image="input.nii", encoding_file="encoding.txt") ->>> task.cmdline # doctest: +ELLIPSIS -'topup --imain=input.nii --datain=encoding.txt --out=input_topup \ ---fout=...input_fieldmap.nii --iout=...input_unwarped.nii ...' - -Using a multiple resolution approach: - ->>> task = Topup( -... input_image="input.nii", -... encoding_file="encoding.txt", -... subsampling_per_level=(4, 2, 1), -... smoothing_per_level=(8.0, 4.0, 0.0), -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'topup --imain=input.nii --datain=encoding.txt ... --subsamp=4,2,1 --fwhm=8.0,4.0,0.0 ...' 
-""" - -__all__ = ["Topup"] - -from os import PathLike -from pathlib import PurePath -from typing import Iterable - -from attrs import define, field -from pydra.engine.specs import File, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -def to_field_per_level(field, param) -> str: - return f"--{param}={','.join([str(elem) for elem in field])}" - - -def to_output_basename(field, input_image) -> str: - return f"--out={field or PurePath(input_image).name.split('.', 1)[0] + '_topup'}" - - -@define(slots=False, kw_only=True) -class TopupSpec(ShellSpec): - """Specifications for topup.""" - - input_image: PathLike = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "--imain={input_image}", - } - ) - - encoding_file: PathLike = field( - metadata={ - "help_string": "text file containing phase encoding directions and timings", - "mandatory": True, - "argstr": "--datain={encoding_file}", - } - ) - - output_basename: str = field( - metadata={ - "help_string": "output basename for field coefficients and movement parameters", - "formatter": to_output_basename, - }, - ) - - output_fieldmap_image: str = field( - metadata={ - "help_string": "output fieldmap image", - "argstr": "--fout={output_fieldmap_image}", - "output_file_template": "{input_image}_fieldmap", - } - ) - - output_unwarped_image: str = field( - metadata={ - "help_string": "output unwarped image", - "argstr": "--iout={output_unwarped_image}", - "output_file_template": "{input_image}_unwarped", - } - ) - - warp_resolution_per_level: Iterable[float] = field( - default=(10.0,), - metadata={ - "help_string": "resolution of warp basis in millimeters for a given level", - "formatter": lambda field: to_field_per_level(field, "warpres"), - }, - ) - - subsampling_per_level: Iterable[int] = field( - default=(1,), - metadata={ - "help_string": "subsampling factor for a given level", - "formatter": lambda field: to_field_per_level(field, "subsamp"), - }, - ) 
- - smoothing_per_level: Iterable[float] = field( - default=(8.0,), - metadata={ - "help_string": "FWHM of smoothing kernel in millimeters for a given level", - "formatter": lambda field: to_field_per_level(field, "fwhm"), - }, - ) - - max_iterations_per_level: Iterable[int] = field( - default=(5,), - metadata={ - "help_string": "maximum number of non-linear iterations for a given level", - "formatter": lambda field: to_field_per_level(field, "miter"), - }, - ) - - regularisation_per_level: Iterable[float] = field( - default=(0.0,), - metadata={ - "help_string": "weight of regularisation for a given level", - "formatter": lambda field: to_field_per_level(field, "lambda"), - }, - ) - - estimate_movement_per_level: Iterable[int] = field( - default=(1,), - metadata={ - "help_string": "wether to estimate (1) or keep movement parameters constant (0) for a given level", - "formatter": lambda field: to_field_per_level(field, "estmov"), - "allowed_values": {0, 1}, - }, - ) - - minimisation_method_per_level: Iterable[int] = field( - default=(0,), - metadata={ - "help_string": ( - "which minimisation method to use for a given level " - "(0: Levenberg-Marquardt, 1: Scaled Conjugate Gradient)" - ), - "formatter": lambda field: to_field_per_level(field, "minmet"), - "allowed_values": {0, 1}, - }, - ) - - weight_regularisation_by_ssq: bool = field( - default=True, - metadata={ - "help_string": "weight regularisation by sum-of-squares", - "formatter": lambda field: f"--ssqlambda={field:d}", - }, - ) - - regularisation_model: str = field( - default="bending_energy", - metadata={ - "help_string": "regularisation model", - "argstr": "--regmod={regularisation_model}", - "allowed_values": {"bending_energy", "membrane_energy"}, - }, - ) - - spline_order: int = field( - default=3, - metadata={ - "help_string": "use quadratic (2) or cubic (3) splines", - "argstr": "--splineorder={spline_order}", - "allowed_values": {2, 3}, - }, - ) - - precision: str = field( - default="double", - 
metadata={ - "help_string": "numerical precision", - "argstr": "--numprec={precision}", - "allowed_values": {"float", "double"}, - }, - ) - - interpolation: str = field( - default="spline", - metadata={ - "help_string": "interpolation model", - "argstr": "--interp={interpolation}", - "allowed_values": {"linear", "spline"}, - }, - ) - - scale: bool = field( - default=False, - metadata={ - "help_string": "scale images to a common mean", - "formatter": lambda field: f"--scale={field:d}", - }, - ) - - regrid: bool = field( - default=True, - metadata={ - "help_string": "perform calculations on a different grid", - "formatter": lambda field: f"--regrid={field:d}", - }, - ) - - num_threads: int = field( - default=1, - metadata={ - "help_string": "number of threads to use", - "argstr": "--nthr={num_threads}", - }, - ) - - verbose: bool = field( - metadata={"help_string": "enable verbose logging", "argstr": "--verbose"} - ) - - -@define(slots=False, kw_only=True) -class TopupOutSpec(ShellOutSpec): - """Output specifications for topup.""" - - field_coefficients_image: File = field( - metadata={ - "help_string": "output field coefficients", - "output_file_template": "{output_basename}_fieldcoef.nii.gz", - } - ) - - movement_parameters_file: File = field( - metadata={ - "help_string": "output movement parameters", - "output_file_template": "{output_basename}_movpar.txt", - } - ) - - -class Topup(ShellCommandTask): - """Task definition for topup.""" - - executable = "topup" - - input_spec = SpecInfo(name="Input", bases=(TopupSpec,)) - - output_spec = SpecInfo(name="Output", bases=(TopupOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/fast.py b/pydra/tasks/fsl/v6_0/fast.py deleted file mode 100644 index ed3a010..0000000 --- a/pydra/tasks/fsl/v6_0/fast.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -FAST -==== - -Automatic segmentation of 3D images of the brain. 
-""" - -__all__ = ["FAST"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class FASTSpec(pydra.specs.ShellSpec): - """Specifications for FAST.""" - - # Input parameters. - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image (single-channel mode)", - "mandatory": True, - "argstr": "", - "position": -1, - } - ) - - image_type: str = attrs.field( - default="T1", - metadata={ - "help_string": "type of input image (T1, T2 or PD)", - "argstr": "-t", - "allowed_values": {"T1", "T2", "PD"}, - "formatter": lambda image_type: "-t {:d}".format( - {"T1": 1, "T2": 2, "PD": 3}.get(image_type) - ), - }, - ) - - # Output parameters. - output_basename: str = attrs.field( - default="fast", - metadata={ - "help_string": "basename used for output files", - "argstr": "-o", - }, - ) - - num_classes: int = attrs.field( - default=3, - metadata={ - "help_string": "number of tissue-type classes", - "argstr": "-n", - }, - ) - - save_probability_maps: bool = attrs.field( - metadata={ - "help_string": "save probability map for each class", - "argstr": "-p", - } - ) - - save_bias_field_image: bool = attrs.field( - metadata={ - "help_string": "save estimated bias field", - "argstr": "-b", - } - ) - - save_bias_corrected_image: bool = attrs.field( - metadata={ - "help_string": "save restored image after bias field correction", - "argstr": "-B", - } - ) - - save_segmentation_masks: bool = attrs.field( - metadata={ - "help_string": "save segmentation mask for each class", - "argstr": "-g", - } - ) - - # Advanced parameters. 
- main_mrf_parameter: float = attrs.field( - default=0.1, - metadata={ - "help_string": "", - "argstr": "-H", - }, - ) - - bias_field_iterations: int = attrs.field( - default=4, - metadata={ - "help_string": "number of iterations for bias field removal", - "argstr": "-I", - }, - ) - - bias_field_smoothing: float = attrs.field( - default=20, - metadata={ - "help_string": "bias field smoothing (FWHM in millimeters)", - "argstr": "-l", - }, - ) - - no_partial_volume_estimation: bool = attrs.field( - metadata={ - "help_string": "do not perform partial volume estimation", - "argstr": "--nopve", - } - ) - - verbose: bool = attrs.field( - metadata={ - "help_string": "enable verbose logging", - "argstr": "-v", - } - ) - - -def get_segmentation_image(output_basename): - return f"{output_basename}_seg" - - -def get_segmentation_masks(output_basename, num_classes): - return [f"{output_basename}_seg_{i}" for i in range(num_classes)] - - -def get_probability_maps(output_basename, num_classes): - return [f"{output_basename}_prob_{i}" for i in range(num_classes)] - - -def get_partial_volume_maps(output_basename, num_classes): - return [f"{output_basename}_pve_{i}" for i in range(num_classes)] - - -def get_bias_field_image(output_basename): - return f"{output_basename}_bias" - - -def get_bias_corrected_image(output_basename): - return f"{output_basename}_restore" - - -@attrs.define(slots=False, kw_only=True) -class FASTOutSpec(pydra.specs.ShellOutSpec): - """Ouput specifications for FAST.""" - - segmentation_image: pydra.specs.File = attrs.field( - metadata={ - "help_string": "segmentation image with each voxel assigned a class", - "mandatory": True, - "callable": get_segmentation_image, - } - ) - - segmentation_masks: pydra.specs.MultiOutputFile = attrs.field( - metadata={ - "help_string": ( - "one segmentation mask per class, each voxel is assigned a value of " - "1 if belonging to the class 0 otherwise." 
- ), - "requires": ["save_segmentation_masks"], - "callable": get_segmentation_masks, - } - ) - - probability_maps: pydra.specs.MultiOutputFile = attrs.field( - metadata={ - "help_string": "posterior probability mapping for each class", - "requires": ["save_probability_maps"], - "callable": get_probability_maps, - } - ) - - partial_volume_maps: pydra.specs.MultiOutputFile = attrs.field( - metadata={ - "help_string": "partial volume mapping for each class", - "requires": [("no_partial_volume_estimation", False)], - "callable": get_partial_volume_maps, - } - ) - - bias_field_image: pydra.specs.File = attrs.field( - metadata={ - "help_string": "estimated bias field", - "requires": ["save_bias_field_image"], - "callable": get_bias_field_image, - } - ) - - bias_corrected_image: pydra.specs.File = attrs.field( - metadata={ - "help_string": "restored input image after bias field correction", - "requires": ["save_bias_corrected_image"], - "callable": get_bias_corrected_image, - } - ) - - -class FAST(pydra.engine.ShellCommandTask): - """Task definition for FAST.""" - - executable = "fast" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(FASTSpec,)) - - output_spec = pydra.specs.SpecInfo(name="Ouput", bases=(FASTOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/flirt/__init__.py b/pydra/tasks/fsl/v6_0/flirt/__init__.py deleted file mode 100644 index 76fee01..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -FLIRT -===== - -.. automodule:: pydra.tasks.fsl.flirt.flirt -.. automodule:: pydra.tasks.fsl.flirt.convertxfm -.. automodule:: pydra.tasks.fsl.flirt.img2imgcoord -.. automodule:: pydra.tasks.fsl.flirt.img2stdcoord -.. 
automodule:: pydra.tasks.fsl.flirt.std2imgcoord -""" - -from .convertxfm import ConcatXFM, ConvertXFM, FixScaleSkew, InvertXFM -from .flirt import FLIRT, ApplyXFM -from .img2imgcoord import Img2ImgCoord -from .img2stdcoord import Img2StdCoord -from .std2imgcoord import Std2ImgCoord diff --git a/pydra/tasks/fsl/v6_0/flirt/convertxfm.py b/pydra/tasks/fsl/v6_0/flirt/convertxfm.py deleted file mode 100644 index 680ac08..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/convertxfm.py +++ /dev/null @@ -1,143 +0,0 @@ -""" -ConvertXFM -========== - -Examples --------- - -Concatenate transformation matrix: - ->>> task = ConcatXFM(input_matrix="AtoB.mat", concat_matrix="BtoC.mat", output_matrix="AtoC.mat") ->>> task.cmdline -'convert_xfm -omat AtoC.mat -concat BtoC.mat AtoB.mat' - -Invert transformation matrix: - ->>> task = InvertXFM(input_matrix="AtoB.mat", output_matrix="BtoA.mat") ->>> task.cmdline -'convert_xfm -omat BtoA.mat -inverse AtoB.mat' - -Fix scaling and skewness with additional matrix: - ->>> task = FixScaleSkew(input_matrix="A.mat", fixscaleskew_matrix="B.mat") ->>> task.cmdline -'convert_xfm -omat ...A_cxfm.mat -fixscaleskew B.mat A.mat' - -Use ConvertXFM to combine multiple operations at once, such as concatenation and inversion: - ->>> task = ConvertXFM(input_matrix="AtoB.mat", concat_matrix="BtoC.mat", inverse=True, output_matrix="CtoA.mat") ->>> task.cmdline -'convert_xfm -omat CtoA.mat -concat BtoC.mat -inverse AtoB.mat' -""" - -__all__ = ["ConvertXFM", "ConcatXFM", "InvertXFM", "FixScaleSkew"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class BaseSpec(pydra.specs.ShellSpec): - """Base specifications for all tasks using convert_xfm.""" - - input_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "input matrix in 4x4 ASCII format", - "mandatory": True, - "argstr": "", - "position": -1, - } - ) - - output_matrix: str = attrs.field( - metadata={ - "help_string": "output matrix in 4x4 ASCII format", - 
"argstr": "-omat", - "output_file_template": "{input_matrix}_cxfm", - } - ) - - -@attrs.define(slots=False, kw_only=True) -class ConvertXFMSpec(BaseSpec): - """Specifications for convert_xfm.""" - - fixscaleskew_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": " fix scaling and skewness with this matrix", - "argstr": "-fixscaleskew", - } - ) - - concat_matrix: os.PathLike = attrs.field( - metadata={"help_string": "concatenate with this matrix", "argstr": "-concat"} - ) - - inverse: bool = attrs.field( - metadata={"help_string": "invert the resulting matrix", "argstr": "-inverse"} - ) - - -class ConvertXFM(pydra.engine.ShellCommandTask): - """Task definition for convert_xfm.""" - - executable = "convert_xfm" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(ConvertXFMSpec,)) - - -@attrs.define(slots=False, kw_only=True) -class ConcatXFMSpec(BaseSpec): - """Specifications for concat_xfm.""" - - concat_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "concatenate with this matrix", - "mandatory": True, - "argstr": "-concat", - } - ) - - -class ConcatXFM(ConvertXFM): - """Task definition for matrix concatenation using convert_xfm.""" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(ConcatXFMSpec,)) - - -@attrs.define(slots=False, kw_only=True) -class InvertXFMSpec(BaseSpec): - """Specifications for invert_xfm.""" - - inverse: bool = attrs.field( - default=True, - metadata={"help_string": "invert the input matrix", "argstr": "-inverse"}, - ) - - -class InvertXFM(ConvertXFM): - """Task definition for matrix inversion using convert_xfm.""" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(InvertXFMSpec,)) - - -@attrs.define(slots=False, kw_only=True) -class FixScaleSkewSpec(BaseSpec): - """Specifications for fixing matrix scaling and skewness using convert_xfm.""" - - fixscaleskew_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": " fix scaling and skewness with this matrix", - "mandatory": True, - 
"argstr": "-fixscaleskew", - } - ) - - -class FixScaleSkew(ConvertXFM): - """Task definition for fixing matrix scaling and skewness using convert_xfm.""" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(FixScaleSkewSpec,)) diff --git a/pydra/tasks/fsl/v6_0/flirt/flirt.py b/pydra/tasks/fsl/v6_0/flirt/flirt.py deleted file mode 100644 index 8fe7a6a..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/flirt.py +++ /dev/null @@ -1,203 +0,0 @@ -""" -FLIRT -===== - -FLIRT (FMRIB's Linear Image Registration Tool) is a robust and accurate tool -for affine registration of intra- and inter-modal brain images. - -Examples --------- - -Register two images together: - ->>> task = FLIRT( -... input_image="invol.nii", -... reference_image="refvol.nii", -... output_matrix="invol2refvol.mat", -... cost_function="mutualinfo", -... degrees_of_freedom=6, -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'flirt -in invol.nii -ref refvol.nii -out ...invol_flirt.nii -omat invol2refvol.mat ... -cost mutualinfo ...' - -Perform a single slice registration: - ->>> task = FLIRT( -... input_image="inslice.nii", -... reference_image="refslice.nii", -... output_image="outslice.nii", -... output_matrix="i2r.mat", -... interpolation="nearestneighbour", -... use_2d_registration=True, -... no_search=True, -... ) ->>> task.cmdline -'flirt -in inslice.nii -ref refslice.nii -out outslice.nii -omat i2r.mat -2D -nosearch ... -interp nearestneighbour' - -Apply a transformation: - ->>> task = ApplyXFM( -... input_image="invol.nii", -... output_image="outvol.nii", -... reference_image="refvol.nii", -... initial_matrix="affine.mat", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'flirt -in invol.nii -ref refvol.nii -out outvol.nii -init affine.mat -applyxfm ...' - -Apply a trasnformation and force isotropic resampling to 1 mm: - ->>> task = ApplyXFM( -... input_image="invol.nii", -... output_image="outvol.nii", -... reference_image="refvol.nii", -... initial_matrix="affine.mat", -... isotropic_resolution=1, -... 
padding_size=5, -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'flirt -in invol.nii -ref refvol.nii -out outvol.nii -init affine.mat -applyisoxfm 1 -paddingsize 5 ...' -""" - -__all__ = ["FLIRT", "ApplyXFM"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - -from . import specs - - -@define(slots=False, kw_only=True) -class BaseSpec(ShellSpec): - """Common specifications for FLIRT-based tasks.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": "-in"} - ) - - reference_image: PathLike = field( - metadata={"help_string": "reference image", "mandatory": True, "argstr": "-ref"} - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "-out", - "output_file_template": "{input_image}_flirt", - } - ) - - output_datatype: str = field( - metadata={ - "help_string": "output datatype", - "argstr": "-datatype", - "allowed_values": {"char", "short", "int", "float", "double"}, - } - ) - - -@define(slots=False, kw_only=True) -class FLIRTSpec(BaseSpec): - """Specifications for FLIRT.""" - - input_weights: PathLike = field( - metadata={ - "help_string": "voxel-wise weighting for input image", - "argstr": "-inweight", - } - ) - - reference_weights: PathLike = field( - metadata={ - "help_string": "voxel-wise weighting for reference image", - "argstr": "-refweight", - } - ) - - initial_matrix: PathLike = field( - metadata={"help_string": "initial transformation matrix", "argstr": "-init"} - ) - - output_matrix: str = field( - metadata={ - "help_string": "output transformation matrix", - "argstr": "-omat", - "output_file_template": "{input_image}_flirt.mat", - "keep_extension": False, - } - ) - - degrees_of_freedom: int = field( - metadata={ - "help_string": "degrees of freedom for the registration model", - "argstr": "-dof", - "allowed_values": {3, 6, 7, 9, 12}, - "xor": 
{"use_2d_registration"}, - } - ) - - use_2d_registration: bool = field( - metadata={ - "help_string": "use rigid-body registration model in 2D", - "argstr": "-2D", - "xor": {"degrees_of_freedom"}, - } - ) - - -class FLIRT(ShellCommandTask): - """Task definition for FLIRT.""" - - executable = "flirt" - - input_spec = SpecInfo( - name="Input", - bases=( - FLIRTSpec, - specs.SearchSpec, - specs.CostFunctionSpec, - specs.InterpolationSpec, - specs.WeightingSpec, - specs.VerboseSpec, - ), - ) - - -@define(slots=False, kw_only=True) -class ApplyXFMSpec(BaseSpec): - """Specifications for ApplyXFM.""" - - initial_matrix: PathLike = field( - metadata={ - "help_string": "initial transformation matrix", - "mandatory": True, - "argstr": "-init", - } - ) - - isotropic_resolution: float = field( - default=0.0, - metadata={ - "help_string": "force resampling to isotropic resolution", - "formatter": lambda isotropic_resolution: ( - f"-applyisoxfm {isotropic_resolution}" - if isotropic_resolution - else "-applyxfm" - ), - }, - ) - - padding_size: float = field( - metadata={"help_string": "padding size in voxels", "argstr": "-paddingsize"} - ) - - -class ApplyXFM(FLIRT): - """Task definition for ApplyXFM.""" - - input_spec = SpecInfo( - name="Input", bases=(ApplyXFMSpec, specs.InterpolationSpec, specs.VerboseSpec) - ) diff --git a/pydra/tasks/fsl/v6_0/flirt/img2imgcoord.py b/pydra/tasks/fsl/v6_0/flirt/img2imgcoord.py deleted file mode 100644 index d4794ec..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/img2imgcoord.py +++ /dev/null @@ -1,63 +0,0 @@ -""" -Img2ImgCoord -============ - -Examples --------- - ->>> task = Img2ImgCoord( -... input_coordinates="coordinates.txt", -... source_image="source.nii", -... destination_image="target.nii", -... affine_matrix="affine.mat", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'img2imgcoord -xfm affine.mat ... -src source.nii -dest target.nii coordinates.txt' -""" - -__all__ = ["Img2ImgCoord"] - -import os - -import attrs - -import pydra - -from . 
import specs - - -@attrs.define(slots=False, kw_only=True) -class Img2ImgCoordSpec(specs.BaseCoordSpec): - """Specifications for img2imgcoord.""" - - source_image: os.PathLike = attrs.field( - metadata={ - "help_string": "source image", - "mandatory": True, - "argstr": "-src", - } - ) - - destination_image: os.PathLike = attrs.field( - metadata={ - "help_string": "destination image", - "mandatory": True, - "argstr": "-dest", - } - ) - - -class Img2ImgCoordOutSpec(specs.CoordOutSpec): - """Output specifications for img2imgcoord.""" - - -class Img2ImgCoord(pydra.engine.ShellCommandTask): - """Task definition for img2imgcoord.""" - - executable = "img2imgcoord" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(Img2ImgCoordSpec, specs.VerboseSpec) - ) - - output_spec = pydra.specs.SpecInfo(name="Output", bases=(Img2ImgCoordOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/flirt/img2stdcoord.py b/pydra/tasks/fsl/v6_0/flirt/img2stdcoord.py deleted file mode 100644 index 3bfb396..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/img2stdcoord.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Img2StdCoord -============ - -Examples --------- - ->>> task = Img2StdCoord( -... input_coordinates="coordinates.txt", -... input_image="input.nii", -... standard_image="standard.nii", -... affine_matrix="affine.mat", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'img2stdcoord -xfm affine.mat ... -img input.nii -std standard.nii coordinates.txt' -""" - -__all__ = ["Img2StdCoord"] - -import os - -import attrs - -import pydra - -from . 
import specs - - -@attrs.define(slots=False, kw_only=True) -class Img2StdCoordSpec(specs.BaseCoordSpec): - """Specifications for img2stdcoord.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "-img", - } - ) - - standard_image: os.PathLike = attrs.field( - metadata={ - "help_string": "standard-space image", - "argstr": "-std", - } - ) - - -class Img2StdCoordOutSpec(specs.CoordOutSpec): - """Output specifications for img2stdcoord.""" - - -class Img2StdCoord(pydra.engine.ShellCommandTask): - """Task definition for img2stdcoord.""" - - executable = "img2stdcoord" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(Img2StdCoordSpec, specs.VerboseSpec) - ) - - output_spec = pydra.specs.SpecInfo(name="Output", bases=(Img2StdCoordOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/flirt/specs.py b/pydra/tasks/fsl/v6_0/flirt/specs.py deleted file mode 100644 index fe1afe6..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/specs.py +++ /dev/null @@ -1,190 +0,0 @@ -"""Common specifications for FLIRT.""" - -__all__ = [ - "CostFunctionSpec", - "InterpolationSpec", - "SearchSpec", - "WeightingSpec", -] - -import os -import pathlib -import typing as ty - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class CostFunctionSpec(pydra.specs.ShellSpec): - cost_function: str = attrs.field( - default="corratio", - metadata={ - "help_string": "cost function", - "argstr": "-cost", - "allowed_values": { - "corratio", - "mutualinfo", - "normmi", - "normcorr", - "leastsq", - }, - }, - ) - - num_bins: int = attrs.field( - default=256, - metadata={ - "help_string": "number of histogram bins", - "argstr": "-bins", - }, - ) - - -@attrs.define(slots=False, kw_only=True) -class InterpolationSpec(pydra.specs.ShellSpec): - interpolation: str = attrs.field( - default="trilinear", - metadata={ - "help_string": "interpolation method", - "argstr": "-interp", - "allowed_values": { - "trilinear", - 
"nearestneighbour", - "spline", - "sinc", - }, - }, - ) - - -@attrs.define(slots=False, kw_only=True) -class SearchSpec(pydra.specs.ShellSpec): - # TODO: Change to Tuple[int, int] with pydra >=0.23 - SearchRange = ty.List[int] - - no_search: bool = attrs.field( - metadata={ - "help_string": "set all angular search ranges to 0", - "argstr": "-nosearch", - } - ) - - search_range_x: SearchRange = attrs.field( - default=[-90, 90], - metadata={ - "help_string": "range of search angles in x", - "formatter": lambda field, no_search: ( - "" if no_search else f"-searchrx {field[0]} {field[1]}" - ), - }, - ) - - search_range_y: SearchRange = attrs.field( - default=[-90, 90], - metadata={ - "help_string": "range of search angles in y", - "formatter": lambda field, no_search: ( - "" if no_search else f"-searchry {field[0]} {field[1]}" - ), - }, - ) - - search_range_z: SearchRange = attrs.field( - default=[-90, 90], - metadata={ - "help_string": "range of search angles in z", - "formatter": lambda field, no_search: ( - "" if no_search else f"-searchrz {field[0]} {field[1]}" - ), - }, - ) - - -@attrs.define(slots=False, kw_only=True) -class WeightingSpec(pydra.specs.ShellSpec): - reference_weighting_image: os.PathLike = attrs.field( - metadata={ - "help_string": "weights for reference image", - "argstr": "-refweight", - } - ) - - input_weighting_image: os.PathLike = attrs.field( - metadata={ - "help_string": "weights for input image", - "argstr": "-inweight", - } - ) - - -@attrs.define(slots=False, kw_only=True) -class VerboseSpec(pydra.specs.ShellSpec): - verbose: bool = attrs.field( - metadata={ - "help_string": "enable verbose logging", - "argstr": "-v", - } - ) - - -@attrs.define(slots=False, kw_only=True) -class BaseCoordSpec(pydra.specs.ShellSpec): - input_coordinates: os.PathLike = attrs.field( - metadata={ - "help_string": "input coordinates", - "mandatory": True, - "argstr": "", - "position": -1, - } - ) - - output_coordinates: str = attrs.field( - metadata={ - 
"help_string": "output coordinates", - "output_file_template": "{input_coordinates}_out", - } - ) - - affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "affine transformation matrix", - "argstr": "-xfm", - } - ) - - input_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "input warpfield image", - "argstr": "-warp", - } - ) - - unit: str = attrs.field( - default="vox", - metadata={ - "help_string": "unit of coordinates: voxels (vox) or millimeters (mm)", - "argstr": "-{unit}", - "allowed_values": {"vox", "mm"}, - }, - ) - - -def _get_output_coordinates(output_coordinates: str, stdout): - output_coordinates = pathlib.Path.cwd() / output_coordinates - - with open(output_coordinates, mode="w") as f: - f.write(stdout) - - return output_coordinates - - -@attrs.define(slots=False, kw_only=True) -class CoordOutSpec(pydra.specs.ShellOutSpec): - output_coordinates: os.PathLike = attrs.field( - metadata={ - "help_string": "output coordinates", - "callable": _get_output_coordinates, - } - ) diff --git a/pydra/tasks/fsl/v6_0/flirt/std2imgcoord.py b/pydra/tasks/fsl/v6_0/flirt/std2imgcoord.py deleted file mode 100644 index f45e707..0000000 --- a/pydra/tasks/fsl/v6_0/flirt/std2imgcoord.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Std2ImgCoord -============ - -Examples --------- - ->>> task = Std2ImgCoord( -... input_coordinates="coordinates.txt", -... input_image="input.nii", -... standard_image="standard.nii", -... affine_matrix="affine.mat", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'std2imgcoord -xfm affine.mat ... -std standard.nii -img input.nii coordinates.txt' -""" - -__all__ = ["Std2ImgCoord"] - -import os - -import attrs - -import pydra - -from . 
import specs - - -@attrs.define(slots=False, kw_only=True) -class Std2ImgCoordSpec(specs.BaseCoordSpec): - """Specifications for std2imgcoord.""" - - standard_image: os.PathLike = attrs.field( - metadata={ - "help_string": "standard-space image", - "argstr": "-std", - } - ) - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "-img", - } - ) - - -class Std2ImgCoordOutSpec(specs.CoordOutSpec): - """Output specifications for std2imgcoord.""" - - -class Std2ImgCoord(pydra.engine.ShellCommandTask): - """Task definition for std2imgcoord.""" - - executable = "std2imgcoord" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(Std2ImgCoordSpec, specs.VerboseSpec) - ) - - output_spec = pydra.specs.SpecInfo(name="Output", bases=(Std2ImgCoordOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/fnirt/__init__.py b/pydra/tasks/fsl/v6_0/fnirt/__init__.py deleted file mode 100644 index 2dae811..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -FNIRT -===== - -.. automodule:: pydra.tasks.fsl.fnirt.fnirt -.. automodule:: pydra.tasks.fsl.fnirt.fnirtfileutils -.. automodule:: pydra.tasks.fsl.fnirt.applywarp -.. automodule:: pydra.tasks.fsl.fnirt.convertwarp -.. automodule:: pydra.tasks.fsl.fnirt.invwarp -""" - -from .applywarp import ApplyWarp -from .convertwarp import ConvertWarp -from .fnirt import FNIRT -from .fnirtfileutils import FNIRTFileUtils -from .invwarp import InvWarp diff --git a/pydra/tasks/fsl/v6_0/fnirt/applywarp.py b/pydra/tasks/fsl/v6_0/fnirt/applywarp.py deleted file mode 100644 index c14e640..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/applywarp.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -ApplyWarp -========= - -Examples --------- - ->>> task = ApplyWarp( -... input_image="invol.nii", -... reference_image="refvol.nii", -... input_warpfield="warpvol.nii", -... warpfield_as="abs", -... 
) ->>> task.cmdline # doctest: +ELLIPSIS -'applywarp --in invol.nii --ref refvol.nii --out ...invol_warped.nii \ ---warp warpvol.nii --abs ...' - ->>> task = ApplyWarp( -... input_image="invol.nii", -... reference_image="refvol.nii", -... output_image="outvol.nii", -... use_sqform=True, -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'applywarp --in invol.nii --ref refvol.nii --out outvol.nii ... --usesqform' -""" - -__all__ = ["ApplyWarp"] - -import os -import typing as ty - -import attrs - -import pydra - -from . import specs - - -@attrs.define(slots=False, kw_only=True) -class ApplyWarpSpec(pydra.specs.ShellSpec): - """Specifications for applywarp.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "--in", - } - ) - - reference_image: os.PathLike = attrs.field( - metadata={ - "help_string": "reference image", - "mandatory": True, - "argstr": "--ref", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output image", - "argstr": "--out", - "output_file_template": "{input_image}_warped", - } - ) - - input_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "deformation field or coefficients", - "argstr": "--warp", - } - ) - - warpfield_as: str = attrs.field( - metadata={ - "help_string": "treat deformation field as absolute (abs) or relative (rel)", - "argstr": "--{warpfield_as}", - "allowed_values": {"abs", "rel"}, - "requires": {"input_warpfield"}, - } - ) - - output_datatype: str = attrs.field( - metadata={ - "help_string": "force output datatype", - "argstr": "--datatype", - "allowed_values": {"char", "short", "int", "float", "double"}, - } - ) - - supersampling_level: ty.Union[str, int] = attrs.field( - metadata={ - "help_string": "level of intermediate supersampling", - "argstr": "--super --superlevel", - } - ) - - pre_affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "pre-affine matrix", - "argstr": "--premat", - } - ) - - 
post_affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "post-affine matrix", - "argstr": "--postmat", - } - ) - - reference_mask: os.PathLike = attrs.field( - metadata={ - "help_string": "mask image in reference space", - "argstr": "--mask", - } - ) - - interpolation: str = attrs.field( - default="trilinear", - metadata={ - "help_string": "interpolation method", - "argstr": "--interp", - "allowed_values": {"nn", "trilinear", "sinc", "spline"}, - }, - ) - - padding_size: float = attrs.field( - metadata={ - "help_string": "padding size in voxels", - "argstr": "--paddingsize", - } - ) - - use_sqform: bool = attrs.field( - metadata={ - "help_string": "use sform and qform from reference and input images", - "argstr": "--usesqform", - "requires": {"input_image", "reference_image"}, - } - ) - - -class ApplyWarp(pydra.engine.ShellCommandTask): - """Task definition for applywarp.""" - - executable = "applywarp" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(ApplyWarpSpec, specs.VerboseSpec) - ) diff --git a/pydra/tasks/fsl/v6_0/fnirt/convertwarp.py b/pydra/tasks/fsl/v6_0/fnirt/convertwarp.py deleted file mode 100644 index 37a038f..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/convertwarp.py +++ /dev/null @@ -1,176 +0,0 @@ -""" -ConvertWarp -=========== - -Examples --------- - ->>> task = ConvertWarp( -... reference_image="refvol.nii", -... pre_affine_matrix="affine.mat", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'convertwarp --ref refvol.nii --out ...refvol_warp.nii --premat affine.mat \ ---jacobian ...refvol_jac.nii' - ->>> task = ConvertWarp( -... reference_image="refvol.nii", -... output_warpfield="outwarp.nii", -... pre_affine_matrix="pre.mat", -... pre_warpfield="warp1.nii", -... post_warpfield="warp2.nii", -... post_affine_matrix="post.mat", -... 
) ->>> task.cmdline # doctest: +ELLIPSIS -'convertwarp --ref refvol.nii --out outwarp.nii --premat pre.mat \ ---warp1 warp1.nii --warp2 warp2.nii --postmat post.mat --jacobian \ -...refvol_jac.nii' - ->>> task = ConvertWarp( -... reference_image="refvol.nii", -... input_shiftmap="shiftmap.nii", -... shift_direction="y-", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'convertwarp --ref refvol.nii --out .../refvol_warp.nii --shiftmap shiftmap.nii \ ---shiftdir y- --jacobian .../refvol_jac.nii' -""" - -__all__ = ["ConvertWarp"] - -import os - -import attrs - -import pydra - -from . import specs - - -@attrs.define(slots=False, kw_only=True) -class ConvertWarpSpec(pydra.specs.ShellSpec): - """Specifications for convertwrap.""" - - reference_image: os.PathLike = attrs.field( - metadata={ - "help_string": "reference image", - "mandatory": True, - "argstr": "--ref", - } - ) - - output_warpfield: str = attrs.field( - metadata={ - "help_string": "output deformation field image", - "argstr": "--out", - "output_file_template": "{reference_image}_warp", - } - ) - - pre_affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "pre-affine matrix", - "argstr": "--premat", - } - ) - - pre_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "warp following pre-affine transform", - "argstr": "--warp1", - } - ) - - mid_affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "mid-warp affine matrix", - "argstr": "--midmat", - } - ) - - post_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "warp preceding post-affine transform", - "argstr": "--warp2", - } - ) - - post_affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "post-affine matrix", - "argstr": "--postmat", - } - ) - - input_shiftmap: os.PathLike = attrs.field( - metadata={ - "help_string": "shiftmap image (applied first)", - "argstr": "--shiftmap", - } - ) - - shift_direction: str = attrs.field( - metadata={ - "help_string": "direction to 
apply shiftmap", - "argstr": "--shiftdir", - "requires": {"input_shiftmap"}, - "allowed_values": {"x", "y", "z", "x-", "y-", "z-"}, - } - ) - - output_jacobian_image: str = attrs.field( - metadata={ - "help_string": "constrain the limits of the Jacobian of the deformation field", - "argstr": "--jacobian", - "output_file_template": "{reference_image}_jac", - } - ) - - constrain_jacobian: bool = attrs.field( - metadata={ - "help_string": "constrain the Jacobian of the deformation field", - "argstr": "--constrainj", - } - ) - - min_jacobian: float = attrs.field( - metadata={ - "help_string": "minimum Jacobian value", - "argstr": "--jmin", - "requires": {"constain_jacobian"}, - } - ) - - max_jacobian: float = attrs.field( - metadata={ - "help_string": "maximum Jacobian value", - "argstr": "--jmax", - "requires": {"constain_jacobian"}, - } - ) - - warpfield_as: str = attrs.field( - metadata={ - "help_string": "treat deformation field as absolute (abs) or relative (rel)", - "argstr": "--{warpfield_as}", - "allowed_values": {"abs", "rel"}, - } - ) - - output_warpfield_as: str = attrs.field( - metadata={ - "help_string": "save output deformation field as absolute (abs) or relative (rel)", - "argstr": "--{output_warpfield_as}out", - "allowed_values": {"abs", "rel"}, - } - ) - - -class ConvertWarp(pydra.engine.ShellCommandTask): - """Task definition for convertwarp.""" - - executable = "convertwarp" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(ConvertWarpSpec, specs.VerboseSpec) - ) diff --git a/pydra/tasks/fsl/v6_0/fnirt/fnirt.py b/pydra/tasks/fsl/v6_0/fnirt/fnirt.py deleted file mode 100644 index 314f2b2..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/fnirt.py +++ /dev/null @@ -1,267 +0,0 @@ -""" -FNIRT -===== - -FNIRT (FSL Non-linear Image Registration Tool) performs non-linear registration of brain images. - -Examples --------- - ->>> task = FNIRT( -... reference_image="template.nii", -... input_image="input.nii", -... 
) ->>> task.cmdline # doctest: +ELLIPSIS -'fnirt --ref template.nii --in input.nii --cout ...input_warpcoef.nii \ ---iout ...input_warped.nii --fout ...input_warpfield.nii \ ---jout ...input_jac.nii ...' - ->>> task = FNIRT( -... reference_image="template.nii", -... input_image="input.nii", -... subsampling=[4, 2, 1], -... warp_resolution=[8, 8, 8], -... input_fwhm=[8, 4, 2], -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'fnirt --ref template.nii --in input.nii ... --subsamp 4,2,1 \ ---warpres 8,8,8 ... --infwhm 8,4,2 ...' -""" - -__all__ = ["FNIRT"] - -import os -import typing as ty - -import attrs - -import pydra - -from . import specs - - -def _format_list(field: list): - return f"{','.join(map(str, field))}" - - -@attrs.define(slots=False, kw_only=True) -class FNIRTSpec(pydra.specs.ShellSpec): - """Task specifications for FNIRT.""" - - reference_image: os.PathLike = attrs.field( - metadata={ - "help_string": "reference image", - "mandatory": True, - "argstr": "--ref", - } - ) - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "--in", - } - ) - - affine_matrix: os.PathLike = attrs.field( - metadata={ - "help_string": "affine matrix", - "argstr": "--aff", - } - ) - - input_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "input warpfield", - "argstr": "--inwarp", - } - ) - - output_warpcoef: str = attrs.field( - metadata={ - "help_string": "output file containing the field coefficients", - "argstr": "--cout", - "output_file_template": "{input_image}_warpcoef", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output image", - "argstr": "--iout", - "output_file_template": "{input_image}_warped", - } - ) - - output_warpfield: str = attrs.field( - metadata={ - "help_string": "output deformation field", - "argstr": "--fout", - "output_file_template": "{input_image}_warpfield", - } - ) - - output_jacobian_image: str = attrs.field( - metadata={ - 
"help_string": "output Jacobian determinant map", - "argstr": "--jout", - "output_file_template": "{input_image}_jac", - } - ) - - reference_mask: os.PathLike = attrs.field( - metadata={ - "help_string": "mask in reference space", - "argstr": "--applyrefmask --refmask", - } - ) - - input_mask: os.PathLike = attrs.field( - metadata={ - "help_string": "mask in input image space", - "argstr": "--applyinmask --inmask", - } - ) - - max_iterations: ty.Iterable[int] = attrs.field( - default=(5, 5, 5, 5), - metadata={ - "help_string": "maximum number of non-linear iterations", - "formatter": lambda field: f"--miter {_format_list(field)}", - }, - ) - - subsampling: ty.Iterable[int] = attrs.field( - default=(4, 2, 1, 1), - metadata={ - "help_string": "sub-sampling scheme", - "formatter": lambda field: f"--subsamp {_format_list(field)}", - }, - ) - - warp_resolution: ty.Tuple[float, float, float] = attrs.field( - default=(10, 10, 10), - metadata={ - "help_string": "resolution of warp basis in x, y and z (in millimeters)", - "formatter": lambda field: f"--warpres {_format_list(field)}", - }, - ) - - spline_order: int = attrs.field( - default=3, - metadata={ - "help_string": "use quadratic (2) or cubic (3) splines", - "argstr": "--splineorder", - "allowed_values": {2, 3}, - }, - ) - - input_fwhm: ty.Iterable[float] = attrs.field( - default=(6.0, 4.0, 2.0, 2.0), - metadata={ - "help_string": "FWHM for Gaussian kernel applied to input image (in millimeters)", - "formatter": lambda field: f"--infwhm {_format_list(field)}", - }, - ) - - reference_fwhm: ty.Iterable[float] = attrs.field( - default=(4.0, 2.0, 0.0, 0.0), - metadata={ - "help_string": "FWHM for Gaussian kernel applied to reference image (in millimeters)", - "formatter": lambda field: f"--reffwhm {_format_list(field)}", - }, - ) - - warp_model: str = attrs.field( - default="bending_energy", - metadata={ - "help_string": "model for warpfield regularisation", - "argstr": "--regmod", - "allowed_values": {"bending_energy", 
"membrane_energy"}, - }, - ) - - warp_lambda: ty.Iterable[float] = attrs.field( - default=(300, 75, 30, 30), - metadata={ - "help_string": "weight of warpfield regularisation", - "argstr": "--lambda", - }, - ) - - jacobian_range: ty.Tuple[float, float] = attrs.field( - default=(1e-2, 1e2), - metadata={ - "help_string": "range of Jacobian determinants", - "formatter": lambda field: f"--jacrange {_format_list(field)}", - }, - ) - - intensity_model: str = attrs.field( - default="global_non_linear_with_bias", - metadata={ - "help_string": "model for intensity mapping", - "argstr": "--intmod", - "allowed_values": { - "none", - "global_linear", - "global_non_linear", - "local_linear", - "global_non_linear_with_bias", - "local_non_linear", - }, - }, - ) - - intensity_order: int = attrs.field( - default=5, - metadata={ - "help_string": "polynomial order for intensity mapping", - "argstr": "--intorder", - }, - ) - - bias_resolution: ty.Tuple[float, float, float] = attrs.field( - default=(50, 50, 50), - metadata={ - "help_string": "resolution for bias field modelling (in millimeters)", - "formatter": lambda field: f"--biasres {_format_list(field)}", - }, - ) - - bias_lambda: float = attrs.field( - default=10000, - metadata={ - "help_string": "regularisation parameter for bias field modelling", - "argstr": "--biaslambda", - }, - ) - - precision: str = attrs.field( - default="double", - metadata={ - "help_string": "numerical precision for Hessian computation (float or double)", - "argstr": "--numprec", - "allowed_values": {"float", "double"}, - }, - ) - - interpolation: str = attrs.field( - default="linear", - metadata={ - "help_string": "interpolation model (linear or spline)", - "argstr": "--interp", - "allowed_values": {"linear", "spline"}, - }, - ) - - -class FNIRT(pydra.engine.ShellCommandTask): - """Task definition for FNIRT.""" - - executable = "fnirt" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(FNIRTSpec, specs.VerboseSpec) - ) diff --git 
a/pydra/tasks/fsl/v6_0/fnirt/fnirtfileutils.py b/pydra/tasks/fsl/v6_0/fnirt/fnirtfileutils.py deleted file mode 100644 index d066e37..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/fnirtfileutils.py +++ /dev/null @@ -1,120 +0,0 @@ -""" -FNIRTFileUtils -============== - -Examples --------- - ->>> task = FNIRTFileUtils( -... input_image="input.nii", -... reference_image="reference.nii", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'fnirtfileutils --in input.nii --ref reference.nii --out ...input_field.nii \ ---outformat field ...' - ->>> task = FNIRTFileUtils( -... input_image="input.nii", -... reference_image="reference.nii", -... output_jacobian_image="jacobian.nii", -... with_affine_transform=True, -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'fnirtfileutils --in input.nii --ref reference.nii ... --jac jacobian.nii \ -... --withaff' -""" - -__all__ = ["FNIRTFileUtils"] - -import os - -import attrs - -import pydra - -from . import specs - - -@attrs.define(slots=False, kw_only=True) -class FNIRTFileUtilsSpec(pydra.specs.ShellSpec): - """Specifications for fnirtfileutils.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image with FNIRT coefficients", - "argstr": "--in", - } - ) - - reference_image: os.PathLike = attrs.field( - metadata={ - "help_string": "reference image", - "argstr": "--ref", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output field or coefficient image", - "argstr": "--out", - "output_file_template": "{input_image}_{output_format}", - } - ) - - output_format: str = attrs.field( - default="field", - metadata={ - "help_string": "output format (field or spline)", - "argstr": "--outformat", - "allowed_values": {"field", "spline"}, - }, - ) - - warp_resolution: float = attrs.field( - metadata={ - "help_string": "warp resolution in millimeters", - "argstr": "--warpres", - # "requires": {("output_format", "spline")}, # TODO - } - ) - - knot_spacing: float = attrs.field( - metadata={ - 
"help_string": "knot spacing in voxels", - "argstr": "--knotspace", - # "requires": {("output_format", "spline")}, # TODO - } - ) - - output_jacobian_image: str = attrs.field( - metadata={ - "help_string": "output Jacobian determinant map", - "argstr": "--jac", - "output_file_template": "{input_image}_jac", - } - ) - - output_jacobian_matrix: str = attrs.field( - metadata={ - "help_string": "output Jacobian matrix map", - "argstr": "--matjac", - "output_file_template": "{input_image}_matjac", - } - ) - - with_affine_transform: bool = attrs.field( - metadata={ - "help_string": "include affine transform in field and jacobian images", - "argstr": "--withaff", - } - ) - - -class FNIRTFileUtils(pydra.engine.ShellCommandTask): - """Task definition for fnirtfileutils.""" - - executable = "fnirtfileutils" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(FNIRTFileUtilsSpec, specs.VerboseSpec) - ) diff --git a/pydra/tasks/fsl/v6_0/fnirt/invwarp.py b/pydra/tasks/fsl/v6_0/fnirt/invwarp.py deleted file mode 100644 index 0ee4ca2..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/invwarp.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -InvWarp -======= - -Examples --------- - ->>> task = InvWarp( -... input_warpfield="warpvol.nii", -... reference_image= "refvol.nii", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'invwarp --warp warpvol.nii --ref refvol.nii --out ...warpvol_invwarp.nii' - ->>> task = InvWarp( -... input_warpfield="warpvol.nii", -... reference_image= "refvol.nii", -... output_warpfield="invwarpvol.nii", -... no_jacobian_constraints=True, -... ) ->>> task.cmdline -'invwarp --warp warpvol.nii --ref refvol.nii --out invwarpvol.nii --noconstraint' - -""" - -__all__ = ["InvWarp"] - -import os - -import attrs - -import pydra - -from . 
import specs - - -@attrs.define(slots=False, kw_only=True) -class InvWarpSpec(pydra.specs.ShellSpec): - """Specifications for invwarp.""" - - input_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "input warp image", - "mandatory": True, - "argstr": "--warp", - } - ) - - reference_image: os.PathLike = attrs.field( - metadata={ - "help_string": "reference image", - "mandatory": True, - "argstr": "--ref", - } - ) - - output_warpfield: str = attrs.field( - metadata={ - "help_string": "output inverse warp image", - "argstr": "--out", - "output_file_template": "{input_warpfield}_invwarp", - } - ) - - warpfield_as: str = attrs.field( - metadata={ - "help_string": "treat deformation field as absolute (abs) or relative (rel)", - "argstr": "--{warpfield_as}", - "allowed_values": {"abs", "rel"}, - "requires": {"input_warpfield"}, - } - ) - - no_jacobian_constraints: bool = attrs.field( - metadata={ - "help_string": "do not constrain the Jacobian of the deformation field", - "argstr": "--noconstraint", - } - ) - - min_jacobian: float = attrs.field( - metadata={ - "help_string": "minimum Jacobian value", - "argstr": "--jmin", - "xor": {"no_jacobian_constraints"}, - } - ) - - max_jacobian: float = attrs.field( - metadata={ - "help_string": "maximum Jacobian value", - "argstr": "--jmax", - "xor": {"no_jacobian_constraints"}, - } - ) - - -class InvWarp(pydra.engine.ShellCommandTask): - """Task definition for invwarp.""" - - executable = "invwarp" - - input_spec = pydra.specs.SpecInfo( - name="Input", bases=(InvWarpSpec, specs.VerboseSpec) - ) diff --git a/pydra/tasks/fsl/v6_0/fnirt/specs.py b/pydra/tasks/fsl/v6_0/fnirt/specs.py deleted file mode 100644 index 7598831..0000000 --- a/pydra/tasks/fsl/v6_0/fnirt/specs.py +++ /dev/null @@ -1,13 +0,0 @@ -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class VerboseSpec(pydra.specs.ShellSpec): - verbose: bool = attrs.field( - metadata={ - "help_string": "enable verbose logging", - "argstr": 
"--verbose", - } - ) diff --git a/pydra/tasks/fsl/v6_0/fugue/__init__.py b/pydra/tasks/fsl/v6_0/fugue/__init__.py deleted file mode 100644 index efa3700..0000000 --- a/pydra/tasks/fsl/v6_0/fugue/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -FUGUE -===== - -.. automodule:: pydra.tasks.fsl.fugue.fugue -.. automodule:: pydra.tasks.fsl.fugue.prepare_fieldmap -.. automodule:: pydra.tasks.fsl.fugue.prelude -.. automodule:: pydra.tasks.fsl.fugue.sigloss -""" - -from .fugue import FUGUE -from .prelude import Prelude -from .prepare_fieldmap import PrepareFieldmap -from .sigloss import SigLoss diff --git a/pydra/tasks/fsl/v6_0/fugue/fugue.py b/pydra/tasks/fsl/v6_0/fugue/fugue.py deleted file mode 100644 index 014e158..0000000 --- a/pydra/tasks/fsl/v6_0/fugue/fugue.py +++ /dev/null @@ -1,242 +0,0 @@ -""" -FUGUE -===== - ->>> task = FUGUE( -... input_image="epi.nii", -... input_phasemap="phasemap.nii", -... dwell_to_asym_time_ratio=0.3, -... output_inverse_warpfield="unwarped.nii", -... ) ->>> task.cmdline -'fugue --in epi.nii --unwarp unwarped.nii --phasemap phasemap.nii --dwelltoasym 0.3' - ->>> task = FUGUE( -... input_image="unwarped.nii", -... input_phasemap="phasemap.nii", -... dwell_to_asym_time_ratio=0.3, -... output_warpfield="warped.nii", -... ) ->>> task.cmdline -'fugue --in unwarped.nii --warp warped.nii --phasemap phasemap.nii --dwelltoasym 0.3' - ->>> task = FUGUE( -... input_phasemap="phasemap.nii", -... output_shiftmap="shiftmap.nii", -... 
) ->>> task.cmdline -'fugue --phasemap phasemap.nii --saveshift shiftmap.nii' -""" - -__all__ = ["FUGUE"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class FUGUESpec(pydra.specs.ShellSpec): - """Specifications for fugue.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "argstr": "--in", - } - ) - - output_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "output warpfield image", - "argstr": "--warp", - } - ) - - output_inverse_warpfield: os.PathLike = attrs.field( - metadata={ - "help_string": "output inverse warpfield image", - "argstr": "--unwarp", - } - ) - - input_phasemap: os.PathLike = attrs.field( - metadata={ - "help_string": "input phase image", - "argstr": "--phasemap", - } - ) - - dwell_to_asym_time_ratio: float = attrs.field( - metadata={ - "help_string": "dwell to asymmetric echo time ratio", - "argstr": "--dwelltoasym", - } - ) - - dwell_time: float = attrs.field( - metadata={ - "help_string": "EPI dwell time in seconds", - "argstr": "--dwell", - } - ) - - asym_time: float = attrs.field( - metadata={ - "help_string": "asymmetric spin echo time in seconds", - "argstr": "--asym", - } - ) - - input_fieldmap: os.PathLike = attrs.field( - metadata={ - "help_string": "load fieldmap image", - "argstr": "--loadfmap", - } - ) - - output_fieldmap: os.PathLike = attrs.field( - metadata={ - "help_string": "save fieldmap image", - "argstr": "--savefmap", - } - ) - - input_shiftmap: os.PathLike = attrs.field( - metadata={ - "help_string": "load pixel shift image", - "argstr": "--loadshift", - } - ) - - output_shiftmap: os.PathLike = attrs.field( - metadata={ - "help_string": "save pixel shift image", - "argstr": "--saveshift", - } - ) - - sigma2d: float = attrs.field( - metadata={ - "help_string": "apply 2D Gaussian smoothing of sigma in millimeter", - "argstr": "--smooth2", - } - ) - - sigma3d: float = attrs.field( - metadata={ - "help_string": "apply 3D 
Gaussian smoothing of sigma in millimeter", - "argstr": "--smooth3", - } - ) - - polynomial_order: int = attrs.field( - metadata={ - "help_string": "order of polynomial fitting", - "argstr": "--poly", - } - ) - - sinusoidal_order: int = attrs.field( - metadata={ - "help_string": "order of sinusoidal (Fourier) fitting", - "argstr": "--fourier", - } - ) - - direction: str = attrs.field( - metadata={ - "help_string": "unwarping direction", - "argstr": "--unwarpdir", - "allowed_values": {"x", "y", "z", "x-", "y-", "z-"}, - } - ) - - input_mask: os.PathLike = attrs.field( - metadata={ - "help_string": "mask for input image", - "argstr": "--mask", - } - ) - - output_unmasked_fieldmap: os.PathLike = attrs.field( - metadata={ - "help_string": "save unmasked fieldmap", - "argstr": "--unmaskfmap", - "requires": {"output_fieldmap"}, - } - ) - - output_unmasked_shiftmap: os.PathLike = attrs.field( - metadata={ - "help_string": "save unmasked shiftmap", - "argstr": "--unmaskshift", - "requires": {"output_shiftmap"}, - } - ) - - verbose: bool = attrs.field( - metadata={ - "help_string": "enable verbose logging", - "argstr": "--verbose", - } - ) - - -@attrs.define(slots=False, kw_only=True) -class FUGUEOutSpec(pydra.specs.ShellOutSpec): - """Output specifications for fugue.""" - - output_warpfield: str = attrs.field( - metadata={ - "help_string": "output warpfield image", - "output_file_template": "{output_warpfield}", - } - ) - - output_inverse_warpfield: str = attrs.field( - metadata={ - "help_string": "output inverse warpfield image", - "output_file_template": "{output_inverse_warpfield}", - } - ) - - output_fieldmap: str = attrs.field( - metadata={ - "help_string": "output fieldmap image", - "output_file_template": "{output_fieldmap}", - } - ) - - output_shiftmap: str = attrs.field( - metadata={ - "help_string": "output shiftmap image", - "output_file_template": "{output_shiftmap}", - } - ) - - output_unmasked_fieldmap: str = attrs.field( - metadata={ - "help_string": "output 
unmasked fieldmap", - "output_file_template": "{output_unmasked_fieldmap}", - } - ) - - output_unmasked_shiftmap: str = attrs.field( - metadata={ - "help_string": "output unmasked shiftmap", - "output_file_template": "{output_unmasked_shiftmap}", - } - ) - - -class FUGUE(pydra.engine.ShellCommandTask): - """Task definition for fugue.""" - - executable = "fugue" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(FUGUESpec,)) - - output_spec = pydra.specs.SpecInfo(name="Output", bases=(FUGUEOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/fugue/prelude.py b/pydra/tasks/fsl/v6_0/fugue/prelude.py deleted file mode 100644 index 6745f14..0000000 --- a/pydra/tasks/fsl/v6_0/fugue/prelude.py +++ /dev/null @@ -1,173 +0,0 @@ -""" -Prelude -======= - -Phase Region Expanding Labeller for Unwrapping Discrete Estimates. - -Examples --------- - ->>> task = Prelude(complex_image="complex.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'prelude --complex complex.nii --out complex_unwrapped_phase.nii --rawphase complex_raw_phase.nii \ ---labels complex_labels.nii --savemask complex_mask.nii ...' - ->>> task = Prelude( -... phase_image="phase.nii", -... magnitude_image="magnitude.nii", -... output_unwrapped_phase_image="unwrapped.nii", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'prelude --abs magnitude.nii --phase phase.nii --out unwrapped.nii ...' 
-""" - -__all__ = ["Prelude"] - -import os - -import attrs - -import pydra - - -def _output_filename_factory(complex_image, phase_image, suffix): - from pathlib import PurePath - - stem, ext = PurePath(complex_image or phase_image).name.split(".", maxsplit=1) - - return f"{stem}_{suffix}.{ext}" - - -@attrs.define(slots=False, kw_only=True) -class PreludeSpec(pydra.specs.ShellSpec): - """Specifications for prelude.""" - - complex_image: os.PathLike = attrs.field( - metadata={ - "help_string": "complex phase image", - "mandatory": True, - "argstr": "--complex", - "xor": {"phase_image"}, - } - ) - - magnitude_image: os.PathLike = attrs.field( - metadata={"help_string": "magnitude image", "argstr": "--abs"} - ) - - phase_image: os.PathLike = attrs.field( - metadata={ - "help_string": "raw phase image", - "mandatory": True, - "argstr": "--phase", - "requires": {"magnitude_image"}, - "xor": {"complex_image"}, - } - ) - - input_mask: os.PathLike = attrs.field( - metadata={"help_string": "input mask", "argstr": "--mask"} - ) - - output_unwrapped_phase_image: str = attrs.field( - metadata={ - "help_string": "output unwrapped phase image", - "formatter": lambda field, complex_image, phase_image: "--out {}".format( - field - or _output_filename_factory( - complex_image, phase_image, "unwrapped_phase" - ) - ), - } - ) - - output_raw_phase_image: str = attrs.field( - metadata={ - "help_string": "output raw phase image", - "formatter": lambda field, complex_image, phase_image: "--rawphase {}".format( - field - or _output_filename_factory(complex_image, phase_image, "raw_phase") - ), - } - ) - - output_labels: str = attrs.field( - metadata={ - "help_string": "output labels", - "formatter": lambda field, complex_image, phase_image: "--labels {}".format( - field or _output_filename_factory(complex_image, phase_image, "labels") - ), - } - ) - - output_mask: os.PathLike = attrs.field( - metadata={ - "help_string": "output mask", - "formatter": lambda field, complex_image, 
phase_image: "--savemask {}".format( - field or _output_filename_factory(complex_image, phase_image, "mask") - ), - } - ) - - num_partitions: int = attrs.field( - default=8, - metadata={ - "help_string": "number of phase partitions", - "argstr": "--numphasesplit", - }, - ) - - process_labels_in_2d: bool = attrs.field( - metadata={ - "help_string": "process labels in 2D", - "argstr": "--labelslices", - "xor": {"process_all_in_2d", "process_all_in_3d"}, - } - ) - - process_all_in_2d: bool = attrs.field( - metadata={ - "help_string": "process all in 2D", - "argstr": "--slices", - "xor": {"process_labels_in_2d", "process_all_in_3d"}, - } - ) - - process_all_in_3d: bool = attrs.field( - metadata={ - "help_string": "process all in 3D", - "argstr": "--force3D", - "xor": {"process_labels_in_2d", "process_all_in_2d"}, - } - ) - - threshold: float = attrs.field( - default=0.0, - metadata={ - "help_string": "intensity threshold for masking", - "argstr": "--thresh", - }, - ) - - first_image_index: int = attrs.field( - metadata={"help_string": "index of first image to process", "argstr": "--start"} - ) - - last_image_index: int = attrs.field( - metadata={"help_string": "index of last image to process", "argstr": "--end"} - ) - - remove_ramps: bool = attrs.field( - metadata={ - "help_string": "remove phase ramps during unwrapping", - "argstr": "--removeramps", - } - ) - - -class Prelude(pydra.engine.ShellCommandTask): - """Task definition for prelude.""" - - executable = "prelude" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(PreludeSpec,)) diff --git a/pydra/tasks/fsl/v6_0/fugue/prepare_fieldmap.py b/pydra/tasks/fsl/v6_0/fugue/prepare_fieldmap.py deleted file mode 100644 index 6e6bb67..0000000 --- a/pydra/tasks/fsl/v6_0/fugue/prepare_fieldmap.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -PrepareFieldmap -=============== - -EPI fieldmap preprocessing. 
- -Examples -======== - ->>> task = PrepareFieldmap(phase_image="gre_phase.nii", magnitude_image="gre_mag.nii", output_image="fmap.nii") ->>> task.cmdline -'fsl_prepare_fieldmap SIEMENS gre_phase.nii gre_mag.nii fmap.nii 2.46' -""" - -__all__ = ["PrepareFieldmap"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class PrepareFieldmapSpec(ShellSpec): - """Specifications for fsl_prepare_fieldmap.""" - - scanner: str = field( - default="SIEMENS", - metadata={"help_string": "scanner (usually SIEMENS)", "argstr": ""}, - ) - - phase_image: PathLike = field( - metadata={"help_string": "phase image", "mandatory": True, "argstr": ""} - ) - - magnitude_image: PathLike = field( - metadata={ - "help_string": "magnitude (brain extracted) image", - "mandatory": True, - "argstr": "", - } - ) - - output_image: str = field( - metadata={ - "help_string": "output fieldmap image in rad/s", - "argstr": "", - "output_file_template": "{phase_image}_fmap", - } - ) - - delta_te: float = field( - default=2.46, - metadata={ - "help_string": "echo time difference of the fieldmap sequence in milliseconds (usually 2.46 on SIEMENS)", - "argstr": "", - }, - ) - - no_check: bool = field( - metadata={ - "help_string": "disable sanity checks for images", - "argstr": "--nocheck", - } - ) - - -class PrepareFieldmap(ShellCommandTask): - """Task definition for fsl_prepare_fieldmap.""" - - executable = "fsl_prepare_fieldmap" - - input_spec = SpecInfo(name="Input", bases=(PrepareFieldmapSpec,)) diff --git a/pydra/tasks/fsl/v6_0/fugue/sigloss.py b/pydra/tasks/fsl/v6_0/fugue/sigloss.py deleted file mode 100644 index 45ba20e..0000000 --- a/pydra/tasks/fsl/v6_0/fugue/sigloss.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -SigLoss -======= - -Estimate signal loss from a B0 map. 
- -Examples --------- - ->>> task = SigLoss(input_image="b0map.nii", input_mask="mask.nii", output_image="sigloss.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'sigloss ... --in b0map.nii --mask mask.nii --sigloss sigloss.nii' -""" - -__all__ = ["SigLoss"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class SigLossSpec(pydra.specs.ShellSpec): - """Specifications for sigloss.""" - - echo_time: float = attrs.field( - default=1.0, - metadata={"help_string": "echo time in seconds", "argstr": "--te"}, - ) - - slice_direction: str = attrs.field( - default="z", - metadata={ - "help_string": "slice direction", - "argstr": "--slicedir", - "allowed_values": {"x", "y", "z"}, - }, - ) - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input B0-map image in rad/s", - "mandatory": True, - "argstr": "--in", - } - ) - - input_mask: os.PathLike = attrs.field( - metadata={"help_string": "input mask", "argstr": "--mask"} - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output signal-loss image", - "argstr": "--sigloss", - "output_file_template": "{input_image}_sigloss", - } - ) - - -class SigLoss(pydra.engine.ShellCommandTask): - """Task definition for sigloss.""" - - executable = "sigloss" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(SigLossSpec,)) diff --git a/pydra/tasks/fsl/v6_0/maths.py b/pydra/tasks/fsl/v6_0/maths.py deleted file mode 100644 index 1670cde..0000000 --- a/pydra/tasks/fsl/v6_0/maths.py +++ /dev/null @@ -1,128 +0,0 @@ -""" -fslmaths -======== - -Mathematical manipulation of images. 
- -Examples --------- - -Convert input image to float: - ->>> task = Maths(input_image="input.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'fslmaths input.nii .../input_fslmaths.nii' - -Multiply input image with a binary mask: - ->>> task = Mul(input_image="input.nii", other_image="mask.nii", output_image="output.nii") ->>> task.cmdline -'fslmaths input.nii -mul mask.nii output.nii' - ->>> task = Threshold(input_image="input.nii", threshold=0.3, output_image="output.nii") ->>> task.cmdline -'fslmaths input.nii -thr 0.3 output.nii' -""" - -__all__ = ["Maths", "MathsSpec", "Mul", "Threshold"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class MathsSpec(ShellSpec): - """Specifications for fslmaths.""" - - _datatypes = {"char", "short", "int", "float", "double", "input"} - - internal_datatype: str = field( - metadata={ - "help_string": "internal datatype", - "argstr": "-dt", - "position": 1, - "allowed_values": _datatypes, - } - ) - - input_image: PathLike = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "", - "position": 2, - } - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "", - "position": -2, - "output_file_template": "{input_image}_fslmaths", - } - ) - - output_datatype: str = field( - metadata={ - "help_string": "output datatype", - "argstr": "-odt", - "position": -1, - "allowed_values": _datatypes, - } - ) - - -class Maths(ShellCommandTask): - """Task definition for fslmaths.""" - - executable = "fslmaths" - - input_spec = SpecInfo(name="Input", bases=(MathsSpec,)) - - -@define(kw_only=True) -class MulSpec(MathsSpec): - """Specifications for fslmaths' mul.""" - - other_image: PathLike = field( - metadata={ - "help_string": "multiply input with other image", - "mandatory": True, - "argstr": "-mul", - } - ) - - -class Mul(Maths): - 
"""Task definition for fslmaths' mul.""" - - input_spec = SpecInfo(name="Input", bases=(MulSpec,)) - - -@define(kw_only=True) -class ThresholdSpec(MathsSpec): - """Specifications for fslmaths' threshold.""" - - threshold: float = field( - metadata={ - "help_string": "value for thresholding the image", - "mandatory": True, - "argstr": "-thr", - } - ) - - -class Threshold(Maths): - """Task definition for fslmaths' threshold.""" - - input_spec = SpecInfo(name="Input", bases=(ThresholdSpec,)) - - -# TODO: Drop compatibility alias for 0.x -FSLMaths = Maths -FSLMathsSpec = MathsSpec -__all__ += ["FSLMaths", "FSLMathsSpec"] diff --git a/pydra/tasks/fsl/v6_0/susan.py b/pydra/tasks/fsl/v6_0/susan.py deleted file mode 100644 index d6a06dd..0000000 --- a/pydra/tasks/fsl/v6_0/susan.py +++ /dev/null @@ -1,106 +0,0 @@ -""" -SUSAN -===== - -Structure-preserving noise reduction. - -Examples --------- - ->>> task = SUSAN(input_image="input.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'susan input.nii 3.0 3 1 0 .../input_susan.nii' - ->>> task = SUSAN( -... input_image="input.nii", -... output_image="output.nii", -... use_median=False, -... usans=[("usan1.nii", 1.0), ("usan2.nii", -1.0)], -... 
) ->>> task.cmdline -'susan input.nii 3.0 3 0 2 usan1.nii 1.0 usan2.nii -1.0 output.nii' -""" - -__all__ = ["SUSAN"] - -import os - -import attrs - -import pydra - - -@attrs.define(slots=False, kw_only=True) -class SUSANSpec(pydra.specs.ShellSpec): - """Specifications for SUSAN.""" - - input_image: os.PathLike = attrs.field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "", - } - ) - - output_image: str = attrs.field( - metadata={ - "help_string": "output image", - "argstr": "", - "position": -1, - "output_file_template": "{input_image}_susan", - } - ) - - brightness_threshold: float = attrs.field( - default=0.0, - metadata={ - "help_string": "brightness threshold", - "argstr": "", - }, - ) - - smoothing: float = attrs.field( - default=3.0, - metadata={ - "help_string": "spatial smoothing in millimeters", - "argstr": "", - }, - ) - - dimensionality: int = attrs.field( - default=3, - metadata={ - "help_string": "perform smoothing in 2D or 3D", - "argstr": "", - "allowed_values": {2, 3}, - }, - ) - - use_median: bool = attrs.field( - default=True, - metadata={ - "help_string": "use median when no neighborhood is found", - "formatter": lambda field: f"{int(field)}", - }, - ) - - # TODO: Replace with factory=list. - usans: list = attrs.field( - metadata={ - "help_string": "find smoothing area from secondary images (up to 2)", - "formatter": lambda field: ( - " ".join( - [f"{len(field or [])}"] - + [f"{usan} {bt}" for usan, bt in field or []] - ) - ), - }, - ) - - -class SUSAN(pydra.engine.ShellCommandTask): - """Task definition for SUSAN.""" - - executable = "susan" - - input_spec = pydra.specs.SpecInfo(name="Input", bases=(SUSANSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/__init__.py b/pydra/tasks/fsl/v6_0/utils/__init__.py deleted file mode 100644 index 4d94ca8..0000000 --- a/pydra/tasks/fsl/v6_0/utils/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -Utils -===== - -.. automodule:: pydra.tasks.fsl.utils.chfiletype -.. 
automodule:: pydra.tasks.fsl.utils.fft -.. automodule:: pydra.tasks.fsl.utils.info -.. automodule:: pydra.tasks.fsl.utils.interleave -.. automodule:: pydra.tasks.fsl.utils.merge -.. automodule:: pydra.tasks.fsl.utils.orient -.. automodule:: pydra.tasks.fsl.utils.reorient2std -.. automodule:: pydra.tasks.fsl.utils.roi -.. automodule:: pydra.tasks.fsl.utils.selectvols -.. automodule:: pydra.tasks.fsl.utils.smoothfill -.. automodule:: pydra.tasks.fsl.utils.split -.. automodule:: pydra.tasks.fsl.utils.swapdim -""" - -from .chfiletype import ChFileType -from .fft import FFT -from .info import Info -from .interleave import Interleave -from .merge import Merge -from .orient import Orient -from .reorient2std import Reorient2Std -from .roi import ROI -from .selectvols import SelectVols -from .smoothfill import SmoothFill -from .split import Slice, Split -from .swapdim import SwapDim diff --git a/pydra/tasks/fsl/v6_0/utils/chfiletype.py b/pydra/tasks/fsl/v6_0/utils/chfiletype.py deleted file mode 100644 index dce65e9..0000000 --- a/pydra/tasks/fsl/v6_0/utils/chfiletype.py +++ /dev/null @@ -1,107 +0,0 @@ -""" -ChFileType -========== - -Convert image to a different NIfTI file format. 
- -Examples --------- - ->>> task = ChFileType(filetype="NIFTI2_GZ", input_image="input.nii", output_basename="output") ->>> task.cmdline -'fslchfiletype NIFTI2_GZ input.nii output' -""" - -__all__ = ["ChFileType"] - -from os import PathLike -from pathlib import Path - -from attrs import define, field -from pydra.engine.specs import File, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - -ALLOWED_FILETYPES = { - "ANALYZE", - "ANALYZE_GZ", - "NIFTI", - "NIFTI_GZ", - "NIFTI_STD::PAIR", - "NIFTI_STD::PAIR_GZ", - "NIFTI2", - "NIFTI2_GZ", - "NIFTI2_STD::PAIR", - "NIFTI2_STD::PAIR_GZ", -} - - -def _get_output_basename(output_basename, input_image): - return output_basename or Path(input_image).name.split(".", 1)[0] - - -def _get_output_image(output_basename, input_image, filetype): - output_basename = _get_output_basename(output_basename, input_image) - - extension = "img" if any(pat in filetype for pat in ["ANALYZE", "PAIR"]) else "nii" - if "GZ" in filetype: - extension += ".gz" - - return Path.cwd() / f"{output_basename}.{extension}" - - -def _get_output_header(output_basename, input_image, filetype): - output_basename = _get_output_basename(output_basename, input_image) - - if any(pat in filetype for pat in ["ANALYZE", "PAIR"]): - extension = "hdr.gz" if "GZ" in filetype else "hdr" - return Path.cwd() / f"{output_basename}.{extension}" - else: - return None - - -@define(kw_only=True) -class ChFileTypeSpec(ShellSpec): - """Specifications for fslchfiletype.""" - - filetype: str = field( - metadata={ - "help_string": "change to this file type", - "mandatory": True, - "argstr": "", - "allowed_values": ALLOWED_FILETYPES, - } - ) - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - output_basename: str = field( - metadata={"help_string": "output basename", "formatter": _get_output_basename} - ) - - -@define(slots=False, kw_only=True) -class ChFileTypeOutSpec(ShellOutSpec): - 
"""Output specifications for fslchfiletype.""" - - output_image: File = field( - metadata={"help_string": "output image", "callable": _get_output_image} - ) - - output_header: File = field( - metadata={ - "help_string": "output header for filetypes which support it", - "callable": _get_output_header, - } - ) - - -class ChFileType(ShellCommandTask): - """Task definition for fslchfiletype.""" - - executable = "fslchfiletype" - - input_spec = SpecInfo(name="Input", bases=(ChFileTypeSpec,)) - - output_spec = SpecInfo(name="Output", bases=(ChFileTypeOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/fft.py b/pydra/tasks/fsl/v6_0/utils/fft.py deleted file mode 100644 index b51c86c..0000000 --- a/pydra/tasks/fsl/v6_0/utils/fft.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -FFT (Fast Fourier Transform) -============================ - -Compute the forward or inverse Fast Fourier Transform of a NIfTI image. - -Examples --------- - -Compute the forward FFT: - ->>> task = FFT(input_image="input.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'fslfft input.nii .../input_fft.nii' - -Compute the inverse FFT: - ->>> task = FFT(input_image="input.nii", output_image="output.nii", inverse=True) ->>> task.cmdline -'fslfft input.nii output.nii -inv' -""" - -__all__ = ["FFT"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class FFTSpec(ShellSpec): - """Specifications for fslfft.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "", - "output_file_template": "{input_image}_fft", - } - ) - - inverse: bool = field( - metadata={"help_string": "compute the inverse FFT", "argstr": "-inv"} - ) - - -class FFT(ShellCommandTask): - """Task definition for fslfft.""" - - executable = "fslfft" - - input_spec = 
SpecInfo(name="Input", bases=(FFTSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/info.py b/pydra/tasks/fsl/v6_0/utils/info.py deleted file mode 100644 index a142bb6..0000000 --- a/pydra/tasks/fsl/v6_0/utils/info.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -Info -==== - -Read essential metadata from the header of a NIfTI image. -""" - -__all__ = ["Info"] - -import re -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(slots=False, kw_only=True) -class InfoSpec(ShellSpec): - """Specifications for fslinfo.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - -@define(kw_only=True) -class InfoOutSpec(ShellOutSpec): - """Output specifications for fslinfo.""" - - data_type: str = field( - metadata={ - "help_string": "data type string", - "callable": lambda stdout: re.search(r"data_type\s*(.*)", stdout).group(1), - } - ) - - dim1: int = field( - metadata={ - "help_string": "array size in 1st dimension", - "callable": lambda stdout: int( - re.search(r"\sdim1\s*(.*)", stdout).group(1) - ), - } - ) - - dim2: int = field( - metadata={ - "help_string": "array size in 2nd dimension", - "callable": lambda stdout: int( - re.search(r"\sdim2\s*(.*)", stdout).group(1) - ), - } - ) - - dim3: int = field( - metadata={ - "help_string": "array size in 3rd dimension", - "callable": lambda stdout: int( - re.search(r"\sdim3\s*(.*)", stdout).group(1) - ), - } - ) - - dim4: int = field( - metadata={ - "help_string": "array size in 4th dimension", - "callable": lambda stdout: int( - re.search(r"\sdim4\s*(.*)", stdout).group(1) - ), - } - ) - - datatype: int = field( - metadata={ - "help_string": "data type code", - "callable": lambda stdout: int( - re.search(r"datatype\s*(.*)", stdout).group(1) - ), - } - ) - - pixdim1: float = field( - metadata={ - "help_string": "pixel spacing in 1st dimension", 
- "callable": lambda stdout: float( - re.search(r"pixdim1\s*(.*)", stdout).group(1) - ), - } - ) - - pixdim2: float = field( - metadata={ - "help_string": "pixel spacing in 2nd dimension", - "callable": lambda stdout: float( - re.search(r"pixdim2\s*(.*)", stdout).group(1) - ), - } - ) - - pixdim3: float = field( - metadata={ - "help_string": "pixel spacing in 3rd dimension", - "callable": lambda stdout: float( - re.search(r"pixdim3\s*(.*)", stdout).group(1) - ), - } - ) - - pixdim4: float = field( - metadata={ - "help_string": "pixel spacing in 4th dimension", - "callable": lambda stdout: float( - re.search(r"pixdim4\s*(.*)", stdout).group(1) - ), - } - ) - - cal_max: float = field( - metadata={ - "help_string": "maximum display intensity", - "callable": lambda stdout: float( - re.search(r"cal_max\s*(.*)", stdout).group(1) - ), - } - ) - - cal_min: float = field( - metadata={ - "help_string": "minimum display intensity", - "callable": lambda stdout: float( - re.search(r"cal_min\s*(.*)", stdout).group(1) - ), - } - ) - - file_type: str = field( - metadata={ - "help_string": "NIfTI file type", - "callable": lambda stdout: re.search(r"file_type\s*(.*)", stdout).group(1), - } - ) - - -class Info(ShellCommandTask): - """Task definition for fslinfo.""" - - executable = "fslinfo" - - input_spec = SpecInfo(name="Input", bases=(InfoSpec,)) - - output_spec = SpecInfo(name="Output", bases=(InfoOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/interleave.py b/pydra/tasks/fsl/v6_0/utils/interleave.py deleted file mode 100644 index 14260ce..0000000 --- a/pydra/tasks/fsl/v6_0/utils/interleave.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Interleave -========== - -Examples --------- - -Interleave images: - ->>> task = Interleave(input_image="in1.nii", other_image="in2.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'fslinterleave in1.nii in2.nii .../in1_interleave.nii' - -Interleave in reverse order: - ->>> task = Interleave( -... input_image="in1.nii", -... other_image="in2.nii", -... 
output_image="out.nii", -... reverse=True, -... ) ->>> task.cmdline -'fslinterleave in1.nii in2.nii out.nii -i' -""" - -__all__ = ["Interleave"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class InterleaveSpec(ShellSpec): - """Specifications for fslinterleave.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - other_image: PathLike = field( - metadata={"help_string": "other image", "mandatory": True, "argstr": ""} - ) - - output_image: str = field( - metadata={ - "help_string": "output_image", - "argstr": "", - "output_file_template": "{input_image}_interleave", - } - ) - - reverse: bool = field( - metadata={"help_string": "reverse slice order", "argstr": "-i"} - ) - - -class Interleave(ShellCommandTask): - """Task definition for fslinterleave.""" - - executable = "fslinterleave" - - input_spec = SpecInfo(name="Input", bases=(InterleaveSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/merge.py b/pydra/tasks/fsl/v6_0/utils/merge.py deleted file mode 100644 index 13b4e28..0000000 --- a/pydra/tasks/fsl/v6_0/utils/merge.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -Merge -===== - -Examples --------- - ->>> task = Merge(dimension="t", input_images=["vol1.nii", "vol2.nii"]) ->>> task.cmdline # doctest: +ELLIPSIS -'fslmerge -t ...merged vol1.nii vol2.nii' -""" - -__all__ = ["Merge"] - -from os import PathLike -from typing import Iterable - -from attrs import define, field -from pydra.engine.specs import File, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class MergeSpec(ShellSpec): - """Specifications for fslmerge.""" - - dimension: str = field( - metadata={ - "help_string": "merge dimension", - "mandatory": True, - "argstr": "-{dimension}", - "allowed_values": {"t", "x", "y", "z", "a", "tr"}, - "xor": 
{"volume_index"}, - } - ) - - volume_index: int = field( - metadata={ - "help_string": "merge volume N from each input file", - "mandatory": True, - "argstr": "-n", - "xor": {"dimension"}, - } - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "", - "output_file_template": "merged", - } - ) - - input_images: Iterable[PathLike] = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "...", - } - ) - - repetition_time: float = field( - metadata={ - "help_string": "specify TR value in seconds (default is 1.0)", - "argstr": "", - } - ) - - -class Merge(ShellCommandTask): - """Task definition for fslmerge.""" - - executable = "fslmerge" - - input_spec = SpecInfo(name="Input", bases=(MergeSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/orient.py b/pydra/tasks/fsl/v6_0/utils/orient.py deleted file mode 100644 index dc93a09..0000000 --- a/pydra/tasks/fsl/v6_0/utils/orient.py +++ /dev/null @@ -1,118 +0,0 @@ -""" -Orient -====== - -Change the orientation of an image. 
- -Examples --------- - ->>> import tempfile ->>> input_file = tempfile.NamedTemporaryFile(suffix="input.nii") - -Change orientation to radiological: - ->>> task = Orient(input_image=input_file.name, force_radiological=True) ->>> task.cmdline # doctest: +ELLIPSIS -'fslorient -forceradiological ...input.nii' - -Change orientation to neurological: - ->>> task = Orient(input_image=input_file.name, force_neurological=True) ->>> task.cmdline # doctest: +ELLIPSIS -'fslorient -forceneurological ...input.nii' - -Swap between radiological and neurological: - ->>> task = Orient(input_image=input_file.name, swap_orientation=True) ->>> task.cmdline # doctest: +ELLIPSIS -'fslorient -swaporient ...input.nii' - -Delete orientation: - ->>> task = Orient(input_image=input_file.name, delete_orientation=True) ->>> task.cmdline # doctest: +ELLIPSIS -'fslorient -deleteorient ...input.nii' -""" - -__all__ = ["Orient"] - -from attrs import define, field -from pydra.engine.specs import File, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class OrientSpec(ShellSpec): - """Specifications for fslorient.""" - - _xor = { - "delete_orientation", - "force_radiological", - "force_neurological", - "swap_orientation", - } - - input_image: File = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "", - "position": -1, - "copyfile": True, - } - ) - - delete_orientation: bool = field( - metadata={ - "help_string": "delete orientation", - "argstr": "-deleteorient", - "xor": _xor, - } - ) - - force_radiological: bool = field( - metadata={ - "help_string": "force orientation to radiological", - "argstr": "-forceradiological", - "xor": _xor, - } - ) - - force_neurological: bool = field( - metadata={ - "help_string": "force orientation to neurological", - "argstr": "-forceneurological", - "xor": _xor, - } - ) - - swap_orientation: bool = field( - metadata={ - "help_string": "swap between radiological and 
neurological", - "argstr": "-swaporient", - "xor": _xor, - } - ) - - -@define(kw_only=True) -class OrientOutSpec(ShellOutSpec): - """Output specifications for fslorient.""" - - output_image: File = field( - metadata={ - "help_string": "output image", - "output_file_template": "{input_image}", - } - ) - - -class Orient(ShellCommandTask): - """Task definition for fslorient.""" - - executable = "fslorient" - - input_spec = SpecInfo(name="Input", bases=(OrientSpec,)) - - output_spec = SpecInfo(name="Output", bases=(OrientOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/reorient2std.py b/pydra/tasks/fsl/v6_0/utils/reorient2std.py deleted file mode 100644 index e563216..0000000 --- a/pydra/tasks/fsl/v6_0/utils/reorient2std.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Reorient2Std -============ - -Change orientation of the image to match the one used -for standard template images (MNI152). - -Examples --------- - ->>> task = Reorient2Std(input_image="image.nii") ->>> task.cmdline # doctest: +ELLIPSIS -'fslreorient2std -m ...image_r2std.mat image.nii ...image_r2std.nii' -""" - -__all__ = ["Reorient2Std"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class Reorient2StdSpec(ShellSpec): - """Specifications for fslreorient2std.""" - - input_image: PathLike = field( - metadata={ - "help_string": "input image", - "mandatory": True, - "argstr": "", - "position": -2, - } - ) - - output_image: str = field( - metadata={ - "help_string": "output reoriented image", - "argstr": "", - "position": -1, - "output_file_template": "{input_image}_r2std", - } - ) - - output_matrix: str = field( - metadata={ - "help_string": "output transformation matrix", - "argstr": "-m", - "output_file_template": "{input_image}_r2std.mat", - "keep_extension": False, - } - ) - - -class Reorient2Std(ShellCommandTask): - """Task definition for fslreorient2std.""" - - 
executable = "fslreorient2std" - - input_spec = SpecInfo(name="Input", bases=(Reorient2StdSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/roi.py b/pydra/tasks/fsl/v6_0/utils/roi.py deleted file mode 100644 index 72cd5f7..0000000 --- a/pydra/tasks/fsl/v6_0/utils/roi.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -ROI (Region-Of-Interest) -======================== - -Manual cropping to a region-of-interest for structural brain images. - -Examples --------- - -Extract a 16-voxel cube starting at position (10, 20, 30): - ->>> task = ROI( -... input_image="image.nii", -... x_min=10, -... x_size=16, -... y_min=20, -... y_size=16, -... z_min=30, -... z_size=16, -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'fslroi image.nii ...image_roi.nii 10 16 20 16 30 16 ...' - -Extract a temporal window starting at 5 onwards: - ->>> task = ROI(input_image="input.nii", output_image="output.nii", t_min=5) ->>> task.cmdline -'fslroi input.nii output.nii 5 -1' -""" - -__all__ = ["ROI"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class ROISpec(ShellSpec): - """Specifications for fslroi.""" - - _requires = {"x_min", "y_min", "z_min"} - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "", - "output_file_template": "{input_image}_roi", - } - ) - - x_min: int = field( - metadata={ - "help_string": "start of ROI in x (0-based indexing)", - "argstr": "", - "requires": _requires, - } - ) - - x_size: int = field( - metadata={ - "help_string": "size of ROI in x (-1 for maximum)", - "argstr": "", - "requires": {"x_min"}, - } - ) - - y_min: int = field( - metadata={ - "help_string": "start of ROI in y (0-based indexing)", - "argstr": "", - "requires": _requires, - } - ) - - y_size: int = field( - metadata={ - 
"help_string": "size of ROI in y (-1 for maximum)", - "argstr": "", - "requires": {"y_min"}, - } - ) - - z_min: int = field( - metadata={ - "help_string": "start of ROI in z (0-based indexing)", - "argstr": "", - "requires": _requires, - } - ) - - z_size: int = field( - metadata={ - "help_string": "size of ROI in z (-1 for maximum)", - "argstr": "", - "requires": {"z_min"}, - } - ) - - t_min: int = field( - default=0, - metadata={"help_string": "start of ROI in t (0-based indexing)", "argstr": ""}, - ) - - t_size: int = field( - default=-1, - metadata={"help_string": "size of ROI in t (-1 for maximum)", "argstr": ""}, - ) - - -class ROI(ShellCommandTask): - """Task definition for fslroi.""" - - executable = "fslroi" - - input_spec = SpecInfo(name="Input", bases=(ROISpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/selectvols.py b/pydra/tasks/fsl/v6_0/utils/selectvols.py deleted file mode 100644 index 972d04a..0000000 --- a/pydra/tasks/fsl/v6_0/utils/selectvols.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -SelectVols -========== - -Examples --------- - -Select volumes from a list and concatenate them: - ->>> task = SelectVols(input_image="input.nii", volumes=[0, 1, 6, 7]) ->>> task.cmdline -'fslselectvols --in input.nii --out .../input_selectvols.nii --vols 0,1,6,7' - -Select volumes from a file and calculate their mean: - ->>> task = SelectVols( -... input_image="input.nii", -... output_image="mean.nii", -... volumes="volumes.txt", -... calculate_mean=True, -... ) ->>> task.cmdline -'fslselectvols --in input.nii --out mean.nii --vols volumes.txt -m' - -Select volumes from a file and calculate their variance: - ->>> task = SelectVols( -... input_image="input.nii", -... output_image="variance.nii", -... volumes="volumes.txt", -... calculate_variance=True, -... 
) ->>> task.cmdline -'fslselectvols --in input.nii --out variance.nii --vols volumes.txt -v' -""" - -__all__ = ["SelectVols"] - -from os import PathLike -from typing import Iterable, Union - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class SelectVolsSpec(ShellSpec): - """Specifications for fslselectvols.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": "--in"} - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "--out", - "output_file_template": "{input_image}_selectvols", - } - ) - - volumes: Union[PathLike, Iterable[int]] = field( - metadata={ - "help_string": "volumes to select (from a file or as a list)", - "mandatory": True, - "formatter": lambda volumes: ( - f"--vols {str(volumes) if isinstance(volumes, (PathLike, str)) else ','.join(map(str, volumes))}" - ), - } - ) - - calculate_mean: bool = field( - metadata={ - "help_string": "calculate mean", - "argstr": "-m", - "xor": {"calculate_variance"}, - } - ) - - calculate_variance: bool = field( - metadata={ - "help_string": "calculate variance", - "argstr": "-v", - "xor": {"calculate_mean"}, - } - ) - - -class SelectVols(ShellCommandTask): - """Task definition for fslselectvols.""" - - executable = "fslselectvols" - - input_spec = SpecInfo(name="Input", bases=(SelectVolsSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/smoothfill.py b/pydra/tasks/fsl/v6_0/utils/smoothfill.py deleted file mode 100644 index 3a0eb66..0000000 --- a/pydra/tasks/fsl/v6_0/utils/smoothfill.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -SmoothFill -========== - -Examples --------- - ->>> task = SmoothFill(input_image="input.nii", output_image="smoothed.nii", input_mask="mask.nii") ->>> task.cmdline -'fslsmoothfill --in input.nii --out smoothed.nii --mask mask.nii' -""" - -__all__ = ["SmoothFill"] - -from os import PathLike - -from 
attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class SmoothFillSpec(ShellSpec): - """Specifications for fslsmoothfill.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": "--in"} - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "--out", - "output_file_template": "{input_image}_smoothfill", - } - ) - - input_mask: PathLike = field( - metadata={"help_string": "input mask", "argstr": "--mask"} - ) - - num_iterations: int = field( - metadata={"help_string": "number of iterations", "argstr": "--niter"} - ) - - -class SmoothFill(ShellCommandTask): - """Task definition for fslsmoothfill.""" - - executable = "fslsmoothfill" - - input_spec = SpecInfo(name="Input", bases=(SmoothFillSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/split.py b/pydra/tasks/fsl/v6_0/utils/split.py deleted file mode 100644 index 01e0d23..0000000 --- a/pydra/tasks/fsl/v6_0/utils/split.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -Split -===== - -Examples --------- ->>> task = Split(input_image="input.nii.gz") ->>> task.cmdline -'fslsplit input.nii.gz input -t' - ->>> task = Slice(input_image="volume.nii", output_basename="slice") ->>> task.cmdline -'fslsplit volume.nii slice -z' -""" - -__all__ = ["Split", "Slice"] - -from os import PathLike -from pathlib import Path - -from attrs import define, field -from pydra.engine.specs import MultiOutputFile, ShellOutSpec, ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -def _get_output_basename(output_basename, input_image): - return output_basename or Path(input_image).name.split(".", 1)[0] - - -def _get_output_images(output_basename, input_image): - output_basename = _get_output_basename(output_basename, input_image) - - return sorted(Path.cwd().glob(f"{output_basename}*.*")) - - -@define(kw_only=True) -class SplitSpec(ShellSpec): - 
"""Specifications for fslsplit.""" - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - output_basename: str = field( - metadata={"help_string": "output basename", "formatter": _get_output_basename} - ) - - direction: str = field( - default="t", - metadata={ - "help_string": "split direction", - "argstr": "-{direction}", - "allowed_values": {"x", "y", "z", "t"}, - }, - ) - - -@define(slots=False, kw_only=True) -class SplitOutSpec(ShellOutSpec): - """Output specifications for fslsplit.""" - - output_images: MultiOutputFile = field( - metadata={"help_string": "output images", "callable": _get_output_images} - ) - - -class Split(ShellCommandTask): - """Task definition for fslsplit.""" - - executable = "fslsplit" - - input_spec = SpecInfo(name="Input", bases=(SplitSpec,)) - - output_spec = SpecInfo(name="Output", bases=(SplitOutSpec,)) - - -@define(kw_only=True) -class SliceSpec(SplitSpec): - """Specifications for fslslice.""" - - direction: str = field( - default="z", - metadata={ - "help_string": "split direction (z)", - "argstr": "-{direction}", - "allowed_values": {"z"}, - }, - ) - - -class SliceOutSpec(SplitOutSpec): - """Output specifications for fslslice.""" - - -class Slice(Split): - """Task definition for fslslice.""" - - input_spec = SpecInfo(name="Input", bases=(SliceSpec,)) - - output_spec = SpecInfo(name="Output", bases=(SliceOutSpec,)) diff --git a/pydra/tasks/fsl/v6_0/utils/swapdim.py b/pydra/tasks/fsl/v6_0/utils/swapdim.py deleted file mode 100644 index 4aa0607..0000000 --- a/pydra/tasks/fsl/v6_0/utils/swapdim.py +++ /dev/null @@ -1,88 +0,0 @@ -""" -SwapDim -======= - -Examples --------- - ->>> task = SwapDim( -... input_image="input.nii", -... new_x="y", -... new_y="x", -... new_z="-z", -... ) ->>> task.cmdline # doctest: +ELLIPSIS -'fslswapdim input.nii y x -z ...input_swapdim.nii' - ->>> task = SwapDim( -... input_image="input.nii", -... output_image="output.nii", -... new_x="RL", -... 
new_y="PA", -... new_z="IS", -... ) ->>> task.cmdline -'fslswapdim input.nii RL PA IS output.nii' -""" - -__all__ = ["SwapDim"] - -from os import PathLike - -from attrs import define, field -from pydra.engine.specs import ShellSpec, SpecInfo -from pydra.engine.task import ShellCommandTask - - -@define(kw_only=True) -class SwapDimSpec(ShellSpec): - """Specifications for fslswapdim.""" - - ALLOWED_AXES = {"x", "-x", "y", "-y", "z", "-z", "LR", "RL", "AP", "PA", "SI", "IS"} - - input_image: PathLike = field( - metadata={"help_string": "input image", "mandatory": True, "argstr": ""} - ) - - new_x: str = field( - metadata={ - "help_string": "new x-axis", - "mandatory": True, - "argstr": "", - "allowed_values": ALLOWED_AXES, - } - ) - - new_y: str = field( - metadata={ - "help_string": "new y-axis", - "mandatory": True, - "argstr": "", - "allowed_values": ALLOWED_AXES, - } - ) - - new_z: str = field( - metadata={ - "help_string": "new z-axis", - "mandatory": True, - "argstr": "", - "allowed_values": ALLOWED_AXES, - } - ) - - output_image: str = field( - metadata={ - "help_string": "output image", - "argstr": "", - "output_file_template": "{input_image}_swapdim", - } - ) - - -class SwapDim(ShellCommandTask): - """Task definition for fslswapdim.""" - - executable = "fslswapdim" - - input_spec = SpecInfo(name="Input", bases=(SwapDimSpec,)) diff --git a/pyproject.toml b/pyproject.toml index f3cd4b5..415ceb7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,15 +3,15 @@ requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" [project] -name = "pydra-fsl" -description = "Pydra tasks package for fsl" +name = "pydra-tasks-fsl" +description = "Pydra tasks package for FMRIB Software Library (FSL)" readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.11" dependencies = [ - "pydra >=0.22", - "fileformats >=0.8.3", - "fileformats-datascience >=0.1", - "fileformats-medimage >=0.4.1", + "pydra >=1.0a2", + "fileformats >=0.15.4", + 
"fileformats-datascience >=0.3.2", + "fileformats-medimage >=0.10.5", "fileformats-medimage-fsl", ] license = { file = "LICENSE" } @@ -30,33 +30,29 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Scientific/Engineering", "Topic :: Software Development :: Libraries", ] dynamic = ["version"] +[project.urls] +Documentation = "https://github.com/nipype/pydra-tasks-fsl#readme" +Issues = "https://github.com/nipype/pydra-tasks-fsl/issues" +Source = "https://github.com/nipype/pydra-tasks-fsl" + [project.optional-dependencies] dev = ["black", "pre-commit"] -doc = [ - "packaging", - "sphinx >=2.1.2", - "sphinx_rtd_theme", - "sphinxcontrib-apidoc ~=0.3.0", - "sphinxcontrib-napoleon", - "sphinxcontrib-versioning", - "pydata-sphinx-theme >=0.13", -] + test = [ - "nipype2pydra", - "pytest >= 4.4.0", - "pytest-cov", + "nipype2pydra >=0.5.0", + "pytest >= 7.4", + "pluggy >= 1.2", + "pytest-cov >=4.1.0", "pytest-env", - "pytest-xdist", + "pytest-xdist >=3.5.0", "pytest-rerunfailures", "codecov", "fileformats-extras", @@ -65,18 +61,29 @@ test = [ "fileformats-medimage-fsl-extras", ] +[tool.hatch.build] +packages = ["pydra"] +exclude = [ + "pydra/**/tests", +] + [tool.hatch.version] source = "vcs" [tool.hatch.build.hooks.vcs] version-file = "pydra/tasks/fsl/_version.py" -[tool.hatch.build.targets.wheel] -packages = ["pydra"] -include-only = ["pydra/tasks/fsl"] +[tool.hatch.envs.default] +dependencies = ["pytest"] + +[tool.hatch.envs.default.scripts] +test = "pytest {args}" + +[[tool.hatch.envs.default.matrix]] +python = ["3.11", "3.12", "3.13"] [tool.black] -target-version = 
["py38"] +target-version = ["py311"] exclude = "_version.py" [tool.codespell] @@ -89,11 +96,6 @@ max-line-length = 88 select = "C,E,F,W,B,B950" extend-ignore = ['E203', 'E501', 'E129', 'W503'] -[project.urls] -Documentation = "https://github.com/nipype/pydra-tasks-fsl#readme" -Issues = "https://github.com/nipype/pydra-tasks-fsl/issues" -Source = "https://github.com/nipype/pydra-tasks-fsl" - [tool.coverage.run] branch = true parallel = true @@ -105,5 +107,25 @@ profile = "black" [tool.pytest.ini_options] minversion = "6.0" -# addopts = ["--doctest-modules", "--doctest-continue-on-failure"] -testpaths = ["pydra"] +testpaths = ["pydra/tasks/fsl"] +log_cli_level = "INFO" +xfail_strict = true +addopts = [ + "-svv", + "-ra", + "--strict-config", + "--strict-markers", + # "--doctest-modules", + # "--doctest-continue-on-failure", + "--cov=pydra.tasks.fsl", + "--cov-report=xml", + "--cov-config=pyproject.toml", +] +doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" +env = "PYTHONHASHSEED=0" +filterwarnings = ["ignore::DeprecationWarning", "ignore:The NumPy module was reloaded:UserWarning"] +junit_family = "xunit2" + + +[tool.ruff.lint] +ignore = ["FBT001", "FA100"] diff --git a/related-packages/fileformats/pyproject.toml b/related-packages/fileformats/pyproject.toml index f0b3af0..d6feebc 100644 --- a/related-packages/fileformats/pyproject.toml +++ b/related-packages/fileformats/pyproject.toml @@ -51,7 +51,7 @@ test = [ "pytest-env>=0.6.2", "pytest-cov>=2.12.1", "codecov", - "fileformats-medimage-CHANGME-extras", + "fileformats-medimage-fsl-extras", ] [project.urls] diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..4510402 --- /dev/null +++ b/tox.ini @@ -0,0 +1,107 @@ +[tox] +requires = + tox>=4 + tox-uv +envlist = + py3{11,12,13}-{latest,pre} + py311-min +skip_missing_interpreters = true + +# Configuration that allows us to split tests across GitHub runners effectively +[gh-actions] +python = + 3.11: py311 + 3.12: py312 + 3.13: py313 + 
+[gh-actions:env] +DEPENDS = + min: min + latest: latest + pre: pre + +[testenv] +description = Pytest with coverage +labels = test +editable = true +pip_pre = + pre: true +pass_env = + # getpass.getuser() sources for Windows: + LOGNAME + USER + LNAME + USERNAME + # Pass user color preferences through + PY_COLORS + FORCE_COLOR + NO_COLOR + CLICOLOR + CLICOLOR_FORCE + PYTHON_GIL +extras = test +setenv = + NO_ET: '1' + pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + pre: UV_INDEX=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + pre: UV_INDEX_STRATEGY=unsafe-best-match +uv_resolution = + min: lowest-direct + +commands = + pytest pydra/tasks/fsl --cov=pydra.tasks.fsl --cov-report term-missing --durations=20 --durations-min=1.0 {posargs:-n auto} + +[testenv:style] +description = Check our style guide +labels = check +deps = + ruff +skip_install = true +commands = + ruff check --diff + ruff format --diff + +[testenv:style-fix] +description = Auto-apply style guide to the extent possible +labels = pre-release +deps = + ruff +skip_install = true +commands = + ruff check --fix + ruff format + +[testenv:spellcheck] +description = Check spelling +labels = check +deps = + codespell[toml] +skip_install = true +commands = + codespell . 
{posargs} + +[testenv:build{,-strict}] +labels = + check + pre-release +deps = + build + twine +skip_install = true +set_env = + # Ignore specific known warnings: + # https://github.com/pypa/pip/issues/11684 + # https://github.com/pypa/pip/issues/12243 + strict: PYTHONWARNINGS=error,once:pkg_resources is deprecated as an API.:DeprecationWarning:pip._internal.metadata.importlib._envs,once:Unimplemented abstract methods {'locate_file'}:DeprecationWarning:pip._internal.metadata.importlib._dists +commands = + python -m build --installer uv + python -m twine check dist/* + +[testenv:publish] +depends = build +labels = release +deps = + twine +skip_install = true +commands = + python -m twine upload dist/*