diff --git a/.circleci/config.yml b/.circleci/config.yml index 9f216d61..47b0e00e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,7 +2,7 @@ version: 2 jobs: build_pytest: machine: - image: ubuntu-2004:202107-02 + image: default working_directory: /tmp/src/nitransforms environment: TZ: "/usr/share/zoneinfo/America/Los_Angeles" @@ -12,9 +12,9 @@ jobs: - checkout - restore_cache: keys: - - env-v3-{{ .Branch }}- - - env-v3-master- - - env-v3- + - env-v6-{{ .Branch }}- + - env-v6-master- + - env-v6- - run: name: Setup git-annex command: | @@ -29,17 +29,14 @@ jobs: - run: name: Setup DataLad command: | - export PY3=$(pyenv versions | grep '3\.' | - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 - python -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 3.4" - python -m pip install --no-cache-dir -U datalad datalad-osf + python3 -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 6.2" + python3 -m pip install --no-cache-dir -U datalad datalad-osf - save_cache: - key: env-v3-{{ .Branch }}-{{ .BuildNum }} + key: env-v6-{{ .Branch }}-{{ .BuildNum }} paths: - /opt/circleci/git-annex.linux - - /opt/circleci/.pyenv/versions/3.9.4 + - /opt/circleci/.pyenv/versions - restore_cache: keys: @@ -49,10 +46,9 @@ jobs: - run: name: Install test data from GIN command: | - export PY3=$(pyenv versions | grep '3\.' | - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 export PATH=/opt/circleci/git-annex.linux:$PATH + pyenv local 3 + eval "$(pyenv init --path)" mkdir -p /tmp/data cd /tmp/data datalad install -r https://gin.g-node.org/oesteban/nitransforms-tests @@ -98,15 +94,12 @@ jobs: name: Build Docker image & push to registry no_output_timeout: 60m command: | - export PY3=$(pyenv versions | grep '3\.' 
| - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 e=1 && for i in {1..5}; do docker build --rm --cache-from=nitransforms:latest \ -t nitransforms:latest \ --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ --build-arg VCS_REF=`git rev-parse --short HEAD` \ - --build-arg VERSION=$( python3 setup.py --version ) . \ + --build-arg VERSION=$( python3 -m setuptools_scm ) . \ && e=0 && break || sleep 15 done && [ "$e" -eq "0" ] docker tag nitransforms:latest localhost:5000/nitransforms @@ -123,10 +116,7 @@ jobs: - run: name: Check version packaged in Docker image command: | - export PY3=$(pyenv versions | grep '3\.' | - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 - THISVERSION=${CIRCLE_TAG:-$(python3 setup.py --version)} + THISVERSION=${CIRCLE_TAG:-$(python3 -m setuptools_scm)} INSTALLED_VERSION=$(\ docker run -it --rm --entrypoint=python nitransforms \ -c 'import nitransforms as nit; print(nit.__version__, end="")' ) @@ -141,13 +131,14 @@ jobs: echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IGxpY2Vuc2UudHh0Cg==" | base64 -d | sh - run: name: Get codecov - command: python -m pip install codecov + command: python3 -m pip install codecov - run: name: Run unit tests no_output_timeout: 2h command: | mkdir -p /tmp/tests/{artifacts,summaries} - docker run -u $( id -u ) -it --rm -w /src/nitransforms \ + docker run -u $( id -u ) -it --rm \ + -w /src/nitransforms -v $PWD:/src/nitransforms \ -v /tmp/data/nitransforms-tests:/data -e TEST_DATA_HOME=/data \ -e COVERAGE_FILE=/tmp/summaries/.pytest.coverage \ -v /tmp/fslicense/license.txt:/opt/freesurfer/license.txt:ro \ @@ -159,7 +150,7 @@ jobs: name: Submit unit test coverage command: | cd /tmp/src/nitransforms - python -m codecov --file /tmp/tests/summaries/unittests.xml \ + python3 -m codecov --file /tmp/tests/summaries/unittests.xml \ --flags unittests -e CIRCLE_JOB - run: name: Clean up tests directory @@ -186,9 +177,9 @@ jobs: command: | 
python3 -m venv /tmp/buildenv source /tmp/buildenv/bin/activate - python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 3.4" \ + python3 -m pip install "setuptools >= 45.0" build wheel "setuptools_scm[toml] >= 6.2" \ "pip>=10.0.1" twine docutils - python setup.py sdist bdist_wheel + python3 -m build twine check dist/nitransforms* - store_artifacts: path: /tmp/src/nitransforms/dist @@ -200,9 +191,9 @@ jobs: command: | python3 -m venv /tmp/install_sdist source /tmp/install_sdist/bin/activate - python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" + python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2" - THISVERSION=$( python3 setup.py --version ) + THISVERSION=$( python3 -m setuptools_scm ) THISVERSION=${CIRCLE_TAG:-$THISVERSION} python3 -m pip install dist/nitransforms*.tar.gz INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")') @@ -214,9 +205,9 @@ jobs: command: | python3 -m venv /tmp/install_wheel source /tmp/install_wheel/bin/activate - python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" + python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2" - THISVERSION=$( python3 setup.py --version ) + THISVERSION=$( python3 -m setuptools_scm ) THISVERSION=${CIRCLE_TAG:-$THISVERSION} python3 -m pip install dist/nitransforms*.whl INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")') diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..6c9e83fc --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + groups: + actions-infrastructure: + patterns: + - "actions/*" diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml deleted file mode 100644 index 97c4984e..00000000 --- a/.github/workflows/pythonpackage.yml +++ 
/dev/null @@ -1,98 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ '*' ] - tags: [ '*' ] - pull_request: - branches: [ master, 'maint/*' ] - -jobs: - build: - if: "!contains(github.event.head_commit.message, '[skip ci]')" - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] - - steps: - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Build package - run: | - pipx run build - - name: Determine expected version - run: | - python -m venv /tmp/getversion - source /tmp/getversion/bin/activate - python -m pip install setuptools_scm - - # Interpolate version - if [[ "$GITHUB_REF" == refs/tags/* ]]; then - TAG=${GITHUB_REF##*/} - fi - THISVERSION=$( python -m setuptools_scm ) - THISVERSION=${TAG:-$THISVERSION} - echo "Expected VERSION: \"${THISVERSION}\"" - echo "THISVERSION=${THISVERSION}" >> ${GITHUB_ENV} - - - name: Install in confined environment [sdist] - run: | - python -m venv /tmp/install_sdist - source /tmp/install_sdist/bin/activate - python -m pip install --upgrade pip - python -m pip install dist/nitransforms*.tar.gz - INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")') - echo "VERSION: \"${THISVERSION}\"" - echo "INSTALLED: \"${INSTALLED_VERSION}\"" - test "${INSTALLED_VERSION}" = "${THISVERSION}" - - - name: Install in confined environment [wheel] - run: | - python -m venv /tmp/install_wheel - source /tmp/install_wheel/bin/activate - python -m pip install --upgrade pip - python -m pip install dist/nitransforms*.whl - INSTALLED_VERSION=$(python -c 'import nitransforms; 
print(nitransforms.__version__, end="")') - echo "INSTALLED: \"${INSTALLED_VERSION}\"" - test "${INSTALLED_VERSION}" = "${THISVERSION}" - - - name: Install in confined environment [pip install .] - run: | - python -m venv /tmp/setup_install - source /tmp/setup_install/bin/activate - python -m pip install --upgrade pip wheel - python -m pip install . - INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")') - echo "INSTALLED: \"${INSTALLED_VERSION}\"" - test "${INSTALLED_VERSION}" = "${THISVERSION}" - - - name: Install in confined environment [pip install -e .] - run: | - python -m venv /tmp/setup_develop - source /tmp/setup_develop/bin/activate - python -m pip install pip - python -m pip install --upgrade pip wheel - python -m pip install -e . - INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")') - echo "INSTALLED: \"${INSTALLED_VERSION}\"" - test "${INSTALLED_VERSION}" = "${THISVERSION}" - - flake8: - if: "!contains(github.event.head_commit.message, '[skip ci]')" - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.7 - uses: actions/setup-python@v4 - with: - python-version: 3.7 - - run: pipx run flake8 nitransforms diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml new file mode 100644 index 00000000..0e9dcb48 --- /dev/null +++ b/.github/workflows/tox.yml @@ -0,0 +1,129 @@ +name: Tox + +on: + push: + branches: [ master, main, 'maint/*' ] + tags: [ '*' ] + pull_request: + branches: [ master, main, 'maint/*' ] + schedule: + - cron: '0 0 * * *' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +env: + # Force tox and pytest to use color + FORCE_COLOR: true + TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests + +jobs: + cache-test-data: + if: "!contains(github.event.head_commit.message, '[skip ci]')" + runs-on: ubuntu-latest + outputs: + SHA: ${{ 
steps.test-head.outputs.SHA }} + steps: + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Git settings (pacify DataLad) + run: | + git config --global user.name 'NiPreps Bot' + git config --global user.email 'nipreps@gmail.com' + - name: Install DataLad + run: | + $CONDA/bin/conda install -c conda-forge git-annex + uv tool install --with=datalad-next --with=datalad-osf datalad + - name: Check remote HEAD + id: test-head + run: | + git ls-remote https://gin.g-node.org/oesteban/nitransforms-tests \ + | awk '/HEAD/{ print "SHA=" $1 }' >> $GITHUB_OUTPUT + + - uses: actions/cache@v4 + with: + path: ${{ env.TEST_DATA_HOME }} + key: data-cache-v2-${{ steps.test-head.outputs.SHA }} + restore-keys: | + data-cache-v2-${{ steps.test-head.outputs.SHA }} + data-cache-v2 + + - name: Install test data + run: | + export PATH=$CONDA/bin:$PATH + mkdir -p /home/runner/testdata + + datalad install -s https://gin.g-node.org/oesteban/nitransforms-tests $TEST_DATA_HOME + datalad update --merge -d $TEST_DATA_HOME + datalad get -J 2 -d $TEST_DATA_HOME + + test: + runs-on: ubuntu-latest + needs: [cache-test-data] + strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + dependencies: [latest, pre] + include: + - python-version: "3.9" + dependencies: min + exclude: + # Do not test pre-releases for versions out of SPEC0 + - python-version: "3.9" + dependencies: pre + - python-version: "3.10" + dependencies: pre + + env: + DEPENDS: ${{ matrix.dependencies }} + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - uses: actions/cache/restore@v4 + with: + path: ${{ env.TEST_DATA_HOME }} + key: data-cache-v2-${{ needs.cache-test-data.outputs.SHA }} + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python 
-c "import sys; print(sys.version)" + - name: Install tox + run: | + uv tool install tox --with=tox-uv --with=tox-gh-actions + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 + - uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + if: ${{ always() }} + + checks: + runs-on: 'ubuntu-latest' + continue-on-error: true + strategy: + matrix: + check: ['style'] + + steps: + - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Show tox config + run: uvx tox c + - name: Show tox config (this call) + run: uvx tox c -e ${{ matrix.check }} + - name: Run check + run: uvx tox -e ${{ matrix.check }} diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml deleted file mode 100644 index 7efe2c74..00000000 --- a/.github/workflows/travis.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: Deps & CI - -on: [push] - -jobs: - build-linux: - if: "!contains(github.event.head_commit.message, '[skip ci]' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'nipy/nitransforms'))" - runs-on: ubuntu-latest - env: - TEST_DATA_HOME: /home/runner/testdata/nitransforms-tests - strategy: - max-parallel: 5 - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] - - steps: - - name: Git settings (pacify DataLad) - run: | - git config --global user.name 'NiPreps Bot' - git config --global user.email 'nipreps@gmail.com' - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - uses: actions/cache@v3 - id: conda - with: - path: | - /usr/share/miniconda/pkgs - /home/runner/.cache/pip - key: python-${{ matrix.python-version }}-v1 - restore-keys: | - python-${{ matrix.python-version }}- - - name: Install DataLad - run: | - $CONDA/bin/conda install -c conda-forge git-annex datalad pip pytest - $CONDA/bin/python -m pip install datalad-osf - - - uses: 
actions/cache@v3 - with: - path: ${{ env.TEST_DATA_HOME }} - key: data-cache-v2 - restore-keys: | - data-cache-v2 - - - name: Install test data - run: | - export PATH=$CONDA/bin:$PATH - mkdir -p /home/runner/testdata - cd /home/runner/testdata - - $CONDA/bin/datalad install https://gin.g-node.org/oesteban/nitransforms-tests - $CONDA/bin/datalad update --merge -d nitransforms-tests/ - $CONDA/bin/datalad get -d nitransforms-tests/ - - - uses: actions/checkout@v3 - - name: Install minimal dependencies - run: | - $CONDA/bin/pip install .[tests] - - name: Run pytest - run: | - $CONDA/bin/pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/ - - - name: Submit code coverage - uses: codecov/codecov-action@v3 - with: - files: cov.xml diff --git a/.gitignore b/.gitignore index 8681c41b..1494e83a 100644 --- a/.gitignore +++ b/.gitignore @@ -82,3 +82,4 @@ local_settings.py *.swp .vscode/ +.DS_Store diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000..dac8efaa --- /dev/null +++ b/.mailmap @@ -0,0 +1,12 @@ +Oscar Esteban +Christopher J. Markiewicz +Christopher J. 
Markiewicz +Mathias Goncalves +Mathias Goncalves +Mathias Goncalves +Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> +Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> +Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> +Stefano Moia +Basile Pinsard +Jaume Banús diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 34413ca6..56d55db7 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,19 +1,19 @@ -# Read the Docs configuration file -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - version: 2 build: - os: ubuntu-20.04 + os: ubuntu-lts-latest tools: - python: "3.8" + python: latest -sphinx: - configuration: docs/conf.py + jobs: + pre_create_environment: + - asdf plugin add uv + - asdf install uv latest + - asdf global uv latest + create_environment: + - uv venv $READTHEDOCS_VIRTUALENV_PATH + install: + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install --cache-dir $READTHEDOCS_VIRTUALENV_PATH/../../uv_cache .[niftiext] -r docs/requirements.txt -python: - install: - - path: . - extra_requirements: - - all - - requirements: docs/requirements.txt +sphinx: + configuration: docs/conf.py diff --git a/.zenodo.json b/.zenodo.json deleted file mode 100644 index 2cea96d1..00000000 --- a/.zenodo.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "title": "NiTransforms: A Python tool to read, represent, manipulate, and apply $n$-dimensional spatial transforms", - "description": "

A Python package to access a cacophony of neuroimaging file formats representing spatio-temporal, linear-and-nonlinear transformations.

", - "creators": [ - { - "affiliation": "Department of Psychology, Stanford University, Stanford, CA, USA", - "name": "Goncalves, Mathias", - "orcid": "0000-0002-7252-7771" - }, - { - "affiliation": "Department of Psychology, Stanford University, Stanford, CA, USA", - "name": "Markiewicz, Christopher J.", - "orcid": "0000-0002-6533-164X" - }, - { - "affiliation": "Basque Center on Cognition Brain and Language, San Sebastian, Spain", - "name": "Moia, Stefano", - "orcid": "0000-0002-2553-3327" - }, - { - "affiliation": "McGovern Institute for Brain Research, Massachusetts Institute of Technology (MIT), Cambridge, MA, USA; and Department of Otolaryngology, Harvard Medical School, Boston, MA, USA", - "name": "Ghosh, Satrajit", - "orcid": "0000-0002-5312-6729" - }, - { - "affiliation": "Department of Psychology, Stanford University, Stanford, CA, USA", - "name": "Poldrack, Russell A.", - "orcid": "0000-0001-6755-0259" - }, - { - "affiliation": "Lausanne University Hospital and University of Lausanne, Lausanne, Switzerland", - "name": "Esteban, Oscar", - "orcid": "0000-0001-8435-6191" - } - ], - "contributors": [ - { - "affiliation": "Charite Universitatsmedizin Berlin, Germany", - "name": "Waller, Lea", - "orcid": "0000-0002-3239-6957", - "type": "Researcher" - } - ], - "keywords": [ - "neuroimaging", - "spatial normalization" - ], - "license": "mit-license", - "upload_type": "software" -} diff --git a/CHANGES.rst b/CHANGES.rst index f118e261..31628681 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,124 @@ +24.1.2 (June 02, 2025) +====================== +New patch release that addresses a crash when applying a 3D transform to a 4D image. 
+ +New Contributors +---------------- +* @coryshain made their first contribution in https://github.com/nipy/nitransforms/pull/236 + +CHANGES +------- +* FIX: Patch for crash when applying 3D transform to 4D image (#236) +* MNT: Switch from zenodo.json to CITATION.cff, add contributors (#237) + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/24.1.1...24.1.2 + +24.1.1 (December 18, 2024) +========================== +New patch release that adds ``nitransforms.resampling.apply`` as a top-level import, and removes the `pkg_resources` dependency. + +CHANGES +------- + +* RF: Add nitransforms.resamping.apply to top module imports in https://github.com/nipy/nitransforms/pull/227 +* FIX: Remove pkg_resources dependency in https://github.com/nipy/nitransforms/pull/230 + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/24.1.0...24.1.1 + +24.1.0 (November 17, 2024) +========================== +New feature release in the 24.1.x series. + +This release has the same code as 24.0.2, but the package has been +tested with Numpy 2.0 and Python 3.13 and the metadata updated accordingly. + +CHANGES +------- +* MAINT: Transition to pyproject.toml and tox, support numpy 2, python 3.13 + by @effigies in https://github.com/nipy/nitransforms/pull/228 + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/24.0.2...24.1.0 + +24.0.2 (September 21, 2024) +=========================== +Bug-fix release in the 24.0.x series. + +CHANGES +------- + +* FIX: Add per-volume transforms as single transform in chain by @effigies in https://github.com/nipy/nitransforms/pull/226 + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/24.0.1...24.0.2 + +24.0.1 (September 17, 2024) +=========================== +Bug-fix release in the 24.0.x series. 
+ +New Contributors +---------------- +* @shnizzedy made their first contribution in https://github.com/nipy/nitransforms/pull/222 + +CHANGES +------- + +* FIX: Use standard library ``pathlib`` by @shnizzedy in https://github.com/nipy/nitransforms/pull/222 +* MAINT: Support pre-``__or__`` types by @effigies in https://github.com/nipy/nitransforms/pull/223 +* MAINT: Bump the actions-infrastructure group with 3 updates by @dependabot in https://github.com/nipy/nitransforms/pull/224 +* MAINT: Bump codecov/codecov-action from 3 to 4 by @dependabot in https://github.com/nipy/nitransforms/pull/225 + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/24.0.0...24.0.1 + +24.0.0 (August 18, 2024) +======================== +A new series incorporating several major changes, including bugfixes and taking on several +housekeeping/maintenance actions. +One relevant change is the outsourcing of the ``apply()`` member out of +transformation data structures by @jmarabotto. +The method ``apply()`` is now a standalone method that operates on one transform +and images/surfaces/etc. provided as arguments. +A later major development is the adoption of a foundation for surface transforms by @feilong +and @Shotgunosine. 
+ +New Contributors +---------------- + +* @mvdoc made their first contribution in https://github.com/nipy/nitransforms/pull/194 +* @jmarabotto made their first contribution in https://github.com/nipy/nitransforms/pull/197 +* @bpinsard made their first contribution in https://github.com/nipy/nitransforms/pull/182 +* @jbanusco made their first contribution in https://github.com/nipy/nitransforms/pull/188 +* @feilong made their first contribution in https://github.com/nipy/nitransforms/pull/203 + +CHANGES +------- + +* FIX: Inefficient iterative reloading of reference and moving images by @oesteban in https://github.com/nipy/nitransforms/pull/186 +* FIX: Postpone coordinate mapping on linear array transforms by @oesteban in https://github.com/nipy/nitransforms/pull/187 +* FIX: Remove unsafe cast during ``TransformBase.apply()`` by @effigies in https://github.com/nipy/nitransforms/pull/189 +* FIX: ``_is_oblique()`` by @mvdoc in https://github.com/nipy/nitransforms/pull/194 +* FIX: Update implementation of ``ndim`` property of transforms by @jmarabotto in https://github.com/nipy/nitransforms/pull/197 +* FIX: Output displacement fields by @bpinsard in https://github.com/nipy/nitransforms/pull/182 +* FIX: Composition of deformation fields by @jbanusco in https://github.com/nipy/nitransforms/pull/188 +* FIX: Indexing disallowed in lists introduced by bugfix by @oesteban in https://github.com/nipy/nitransforms/pull/204 +* FIX: Do not transpose (see :obj:`~scipy.ndimage.map_coordinates`) by @oesteban in https://github.com/nipy/nitransforms/pull/207 +* FIX: Forgotten test using ``xfm.apply()`` by @oesteban in https://github.com/nipy/nitransforms/pull/208 +* FIX: Load ITK fields from H5 correctly by @effigies in https://github.com/nipy/nitransforms/pull/211 +* FIX: Wrong warning argument name ``level`` in ``warnings.warn`` by @oesteban in https://github.com/nipy/nitransforms/pull/216 +* ENH: Define ``ndim`` property on nonlinear transforms by @oesteban in 
https://github.com/nipy/nitransforms/pull/201 +* ENH: Outsource ``apply()`` from transform objects by @jmarabotto in https://github.com/nipy/nitransforms/pull/195 +* ENH: Restore ``apply()`` method, warning of deprecation and calling function by @effigies in https://github.com/nipy/nitransforms/pull/209 +* ENH: ``SurfaceTransform`` class by @feilong in https://github.com/nipy/nitransforms/pull/203 +* ENH: reenable-parallelization-apply-214 (builds on PR #215, solves Issue #214) by @jmarabotto in https://github.com/nipy/nitransforms/pull/217 +* ENH: Parallelize serialized 3D+t transforms by @oesteban in https://github.com/nipy/nitransforms/pull/220 +* ENH: Implement a memory limitation mechanism in loading data by @oesteban in https://github.com/nipy/nitransforms/pull/221 +* ENH: Serialize+parallelize 4D ``apply()`` into 3D+t and add 'low memory' loading by @oesteban in https://github.com/nipy/nitransforms/pull/215 +* MAINT: Loosen dependencies by @mgxd in https://github.com/nipy/nitransforms/pull/164 +* MAINT: Drop Python 3.7 support, test through 3.11 by @effigies in https://github.com/nipy/nitransforms/pull/181 +* MAINT: Update CircleCI's infrastructure (machine image and Python version in Docker image) by @oesteban in https://github.com/nipy/nitransforms/pull/206 +* MAINT: Fix tests for Python 3.12, numpy 2.0, and pytest-xdist by @effigies in https://github.com/nipy/nitransforms/pull/210 +* MAINT: Update ANTs' pinnings by @oesteban in https://github.com/nipy/nitransforms/pull/219 + +**Full Changelog**: https://github.com/nipy/nitransforms/compare/23.0.1...24.0.0 + 23.0.1 (July 10, 2023) ====================== Hotfix release addressing two issues. 
diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000..575132c1 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,100 @@ +cff-version: 1.2.0 +title: "NiTransforms: A Python tool to read, represent, manipulate, and apply N-dimensional spatial transforms" +license: MIT +type: software +url: https://github.com/nipy/nitransforms/ +version: 24.1.1 +date-released: 2024-12-18 +abstract: | + Spatial transforms formalize mappings between coordinates of objects in biomedical images. + Transforms typically are the outcome of image registration methodologies, which estimate + the alignment between two images. + Image registration is a prominent task present in nearly all standard image processing + and analysis pipelines. + The proliferation of software implementations of image registration methodologies has + resulted in a spread of data structures and file formats used to preserve and communicate + transforms. + This segregation of formats precludes the compatibility between tools and endangers the + reproducibility of results. + We propose a software tool capable of converting between formats and resampling images + to apply transforms generated by the most popular neuroimaging packages and libraries + (AFNI, FSL, FreeSurfer, ITK, and SPM). + The proposed software is subject to continuous integration tests to check the + compatibility with each supported tool after every change to the code base. + Compatibility between software tools and imaging formats is a necessary bridge + to ensure the reproducibility of results and enable the optimization and evaluation + of current image processing and analysis workflows. +keywords: + - neuroimaging + - spatial normalization + +authors: + - family-names: Goncalves + given-names: Mathias + orcid: https://orcid.org/0000-0002-7252-7771 + affiliation: "Department of Psychology, Stanford University, Stanford, CA, USA" + - family-names: Markiewicz + given-names: Christopher J. 
+ orcid: https://orcid.org/0000-0002-6533-164X + affiliation: "Department of Psychology, Stanford University, Stanford, CA, USA" + - family-names: Moia + given-names: Stefano + orcid: https://orcid.org/0000-0002-2553-3327 + affiliation: "Basque Center on Cognition Brain and Language, San Sebastian, Spain" + - family-names: Waller + given-names: Lea + orcid: https://orcid.org/0000-0002-3239-6957 + affiliation: Charite Universitatsmedizin Berlin, Germany + - family-names: Pinsard + given-names: Basile + orcid: https://orcid.org/0000-0002-4391-3075 + affiliation: University of Montréal, Montréal, Canada + - family-names: Banús + given-names: Jaume + orcid: https://orcid.org/0000-0001-9318-6323 + - family-names: Visconti di Oleggio Castello + given-names: Matteo + orcid: https://orcid.org/0000-0001-7931-5272 + affiliation: University of California Berkeley, Berkeley, CA, USA + - family-names: Marabotto + given-names: Julien + orcid: https://orcid.org/0009-0003-7070-5217 + affiliation: Lausanne University Hospital and University of Lausanne, Lausanne, Switzerland + - family-names: Ma + given-names: Feilong + orcid: https://orcid.org/0000-0002-6838-3971 + affiliation: Dartmouth College, Hanover, NH, United States + - family-names: Nielson + given-names: Dylan + orcid: https://orcid.org/0000-0003-4613-6643 + affiliation: Machine Learning Team, National Institute of Mental Health, USA + - family-names: Cluce + given-names: Jon + orcid: https://orcid.org/0000-0001-7590-5806 + affiliation: Child Mind Institute, New York, NY, USA + - family-names: Shain + given-names: Cory + orcid: https://orcid.org/0000-0002-2704-7197 + affiliation: Stanford University, Stanford, CA, USA + ## When contributing, please copy and uncomment the following lines + # - family-names: + # given-names: + # orcid: https://orcid.org/ + # affiliation: + - family-names: Ghosh + given-names: Satrajit + orcid: https://orcid.org/0000-0002-5312-6729 + affiliation: "McGovern Institute for Brain Research, 
Massachusetts Institute of Technology (MIT), Cambridge, MA, USA; and Department of Otolaryngology, Harvard Medical School, Boston, MA, USA" + - family-names: Poldrack + given-names: Russell A. + orcid: https://orcid.org/0000-0001-6755-0259 + affiliation: "Department of Psychology, Stanford University, Stanford, CA, USA" + - family-names: Esteban + given-names: Oscar + orcid: https://orcid.org/0000-0001-8435-6191 + affiliation: "Lausanne University Hospital and University of Lausanne, Lausanne, Switzerland" + +identifiers: + - description: Concept DOI for the software + type: doi + value: 10.5281/zenodo.5499693 diff --git a/Dockerfile b/Dockerfile index ae270b45..a1c5f4b2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,48 +1,150 @@ -FROM ubuntu:xenial-20200114 - -# Pre-cache neurodebian key -COPY docker/files/neurodebian.gpg /usr/local/etc/neurodebian.gpg - -# Prepare environment +# Ubuntu 22.04 LTS - Jammy +ARG BASE_IMAGE=ubuntu:jammy-20240125 + +# +# Build wheel +# +FROM python:slim AS src +RUN pip install build +RUN apt-get update && \ + apt-get install -y --no-install-recommends git +COPY . 
/src +RUN python -m build /src + +# +# Download stages +# + +# Utilities for downloading packages +FROM ${BASE_IMAGE} as downloader +# Bump the date to current to refresh curl/certificates/etc +RUN echo "2023.07.20" RUN apt-get update && \ apt-get install -y --no-install-recommends \ - curl \ + binutils \ bzip2 \ ca-certificates \ - xvfb \ - build-essential \ - autoconf \ - libtool \ - pkg-config \ - git && \ - curl -sL https://deb.nodesource.com/setup_10.x | bash - && \ + curl \ + unzip && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +RUN update-ca-certificates -f + +# FreeSurfer 7.3.2 +FROM downloader as freesurfer +COPY docker/files/freesurfer7.3.2-exclude.txt /usr/local/etc/freesurfer7.3.2-exclude.txt +COPY docker/files/fs-cert.pem /usr/local/etc/fs-cert.pem +RUN curl --cacert /usr/local/etc/fs-cert.pem \ + -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-ubuntu22_amd64-7.3.2.tar.gz \ + | tar zxv --no-same-owner -C /opt --exclude-from=/usr/local/etc/freesurfer7.3.2-exclude.txt + +# AFNI +FROM downloader as afni +# Bump the date to current to update AFNI +RUN echo "2023.07.20" +RUN mkdir -p /opt/afni-latest \ + && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \ + | tar -xz -C /opt/afni-latest --strip-components 1 \ + --exclude "linux_openmp_64/*.gz" \ + --exclude "linux_openmp_64/funstuff" \ + --exclude "linux_openmp_64/shiny" \ + --exclude "linux_openmp_64/afnipy" \ + --exclude "linux_openmp_64/lib/RetroTS" \ + --exclude "linux_openmp_64/lib_RetroTS" \ + --exclude "linux_openmp_64/meica.libs" \ + # Keep only what we use + && find /opt/afni-latest -type f -not \( \ + -name "3dTshift" -or \ + -name "3dUnifize" -or \ + -name "3dAutomask" -or \ + -name "3dvolreg" -or \ + -name "3dNwarpApply" \ + \) -delete + +# Micromamba +FROM downloader as micromamba + +# Install a C compiler to build extensions when needed. 
+# traits<6.4 wheels are not available for Python 3.11+, but build easily. +RUN apt-get update && \ + apt-get install -y --no-install-recommends build-essential && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +WORKDIR / +# Bump the date to current to force update micromamba +RUN echo "2024.02.06" +RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + +ENV MAMBA_ROOT_PREFIX="/opt/conda" +COPY env.yml /tmp/env.yml +# COPY requirements.txt /tmp/requirements.txt +WORKDIR /tmp +RUN micromamba create -y -f /tmp/env.yml && \ + micromamba clean -y -a + +# +# Main stage +# +FROM ${BASE_IMAGE} as nitransforms + +# Configure apt +ENV DEBIAN_FRONTEND="noninteractive" \ + LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" + +# Some baseline tools; bc is needed for FreeSurfer, so don't drop it +RUN apt-get update && \ apt-get install -y --no-install-recommends \ - nodejs && \ + bc \ + ca-certificates \ + curl \ + git \ + gnupg \ + lsb-release \ + netbase \ + xvfb && \ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Installing freesurfer -RUN curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz | tar zxv --no-same-owner -C /opt \ - --exclude='freesurfer/diffusion' \ - --exclude='freesurfer/docs' \ - --exclude='freesurfer/fsfast' \ - --exclude='freesurfer/lib/cuda' \ - --exclude='freesurfer/lib/qt' \ - --exclude='freesurfer/matlab' \ - --exclude='freesurfer/mni/share/man' \ - --exclude='freesurfer/subjects/fsaverage_sym' \ - --exclude='freesurfer/subjects/fsaverage3' \ - --exclude='freesurfer/subjects/fsaverage4' \ - --exclude='freesurfer/subjects/cvs_avg35' \ - --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ - --exclude='freesurfer/subjects/bert' \ - --exclude='freesurfer/subjects/lh.EC_average' \ - --exclude='freesurfer/subjects/rh.EC_average' \ - --exclude='freesurfer/subjects/sample-*.mgz' \ - 
--exclude='freesurfer/subjects/V1_average' \ - --exclude='freesurfer/trctrain' - -ENV FSL_DIR="/usr/share/fsl/5.0" \ - OS="Linux" \ +# Configure PPAs for libpng12 and libxp6 +RUN GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/linuxuprising.gpg --recv 0xEA8CACC073C3DB2A \ + && GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/zeehio.gpg --recv 0xA1301338A3A48C4A \ + && echo "deb [signed-by=/usr/share/keyrings/linuxuprising.gpg] https://ppa.launchpadcontent.net/linuxuprising/libpng12/ubuntu jammy main" > /etc/apt/sources.list.d/linuxuprising.list \ + && echo "deb [signed-by=/usr/share/keyrings/zeehio.gpg] https://ppa.launchpadcontent.net/zeehio/libxp/ubuntu jammy main" > /etc/apt/sources.list.d/zeehio.list + +# Dependencies for AFNI; requires a discontinued multiarch-support package from bionic (18.04) +RUN apt-get update -qq \ + && apt-get install -y -q --no-install-recommends \ + ed \ + gsl-bin \ + libglib2.0-0 \ + libglu1-mesa-dev \ + libglw1-mesa \ + libgomp1 \ + libjpeg62 \ + libpng12-0 \ + libxm4 \ + libxp6 \ + netpbm \ + tcsh \ + xfonts-base \ + xvfb \ + && curl -sSL --retry 5 -o /tmp/multiarch.deb http://archive.ubuntu.com/ubuntu/pool/main/g/glibc/multiarch-support_2.27-3ubuntu1.5_amd64.deb \ + && dpkg -i /tmp/multiarch.deb \ + && rm /tmp/multiarch.deb \ + && apt-get install -f \ + && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ + && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \ + && if [ -n "$gsl2_path" ]; then \ + ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \ + fi \ + && ldconfig + +# Install files from stages +COPY --from=freesurfer /opt/freesurfer /opt/freesurfer +COPY --from=afni /opt/afni-latest /opt/afni-latest + +# Simulate SetUpFreeSurfer.sh +ENV OS="Linux" \ FS_OVERRIDE=0 \ FIX_VERTEX_AREA="" \ FSF_OUTPUT_FORMAT="nii.gz" \ @@ -56,95 +158,59 @@ ENV 
SUBJECTS_DIR="$FREESURFER_HOME/subjects" \ MNI_DATAPATH="$FREESURFER_HOME/mni/data" ENV PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \ MNI_PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \ - PATH="$FREESURFER_HOME/bin:$FSFAST_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH" + PATH="$FREESURFER_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH" -# Installing Neurodebian packages (FSL, AFNI, git) -RUN curl -sSL "http://neuro.debian.net/lists/$( lsb_release -c | cut -f2 ).us-ca.full" >> /etc/apt/sources.list.d/neurodebian.sources.list && \ - apt-key add /usr/local/etc/neurodebian.gpg && \ - (apt-key adv --refresh-keys --keyserver hkp://ha.pool.sks-keyservers.net 0xA5D32F012649A5A9 || true) +# AFNI config +ENV PATH="/opt/afni-latest:$PATH" \ + AFNI_IMSAVE_WARNINGS="NO" \ + AFNI_PLUGINPATH="/opt/afni-latest" -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - fsl-core=5.0.9-5~nd16.04+1 \ - fsl-mni152-templates=5.0.7-2 \ - afni=16.2.07~dfsg.1-5~nd16.04+1 \ - convert3d \ - connectome-workbench=1.3.2-2~nd16.04+1 \ - git-annex-standalone && \ - apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +# Workbench config +ENV PATH="/opt/workbench/bin_linux64:$PATH" + +# Create a shared $HOME directory +RUN useradd -m -s /bin/bash -G users neuro +WORKDIR /home/neuro +ENV HOME="/home/neuro" \ + LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH" + +COPY --from=micromamba /bin/micromamba /bin/micromamba +COPY --from=micromamba /opt/conda/envs/nitransforms /opt/conda/envs/nitransforms -ENV FSLDIR="/usr/share/fsl/5.0" \ +ENV MAMBA_ROOT_PREFIX="/opt/conda" +RUN micromamba shell init -s bash && \ + echo "micromamba activate nitransforms" >> $HOME/.bashrc +ENV PATH="/opt/conda/envs/nitransforms/bin:$PATH" \ + CPATH="/opt/conda/envs/nitransforms/include:$CPATH" \ + LD_LIBRARY_PATH="/opt/conda/envs/nitransforms/lib:$LD_LIBRARY_PATH" + +# FSL environment +ENV LANG="C.UTF-8" \ + LC_ALL="C.UTF-8" \ + PYTHONNOUSERSITE=1 \ + FSLDIR="/opt/conda/envs/nitransforms" 
\ FSLOUTPUTTYPE="NIFTI_GZ" \ FSLMULTIFILEQUIT="TRUE" \ - POSSUMDIR="/usr/share/fsl/5.0" \ - LD_LIBRARY_PATH="/usr/lib/fsl/5.0:$LD_LIBRARY_PATH" \ - FSLTCLSH="/usr/bin/tclsh" \ - FSLWISH="/usr/bin/wish" \ - AFNI_MODELPATH="/usr/lib/afni/models" \ - AFNI_IMSAVE_WARNINGS="NO" \ - AFNI_TTATLAS_DATASET="/usr/share/afni/atlases" \ - AFNI_PLUGINPATH="/usr/lib/afni/plugins" -ENV PATH="/usr/lib/fsl/5.0:/usr/lib/afni/bin:$PATH" - -# Installing ANTs 2.3.3 (NeuroDocker build) -# Note: the URL says 2.3.4 but it is actually 2.3.3 -ENV ANTSPATH=/usr/lib/ants -RUN mkdir -p $ANTSPATH && \ - curl -sSL "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \ - | tar -xzC $ANTSPATH --strip-components 1 -ENV PATH=$ANTSPATH:$PATH - -# Installing and setting up miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda3-4.5.11-Linux-x86_64.sh && \ - bash Miniconda3-4.5.11-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda3-4.5.11-Linux-x86_64.sh - -# Set CPATH for packages relying on compiled libs (e.g. 
indexed_gzip) -ENV PATH="/usr/local/miniconda/bin:$PATH" \ - CPATH="/usr/local/miniconda/include/:$CPATH" \ - LANG="C.UTF-8" \ - LC_ALL="C.UTF-8" \ - PYTHONNOUSERSITE=1 - -# Installing precomputed python packages -RUN conda install -y -c anaconda -c conda-forge \ - python=3.7 \ - libxml2=2.9 \ - libxslt=1.1 \ - lxml \ - mkl \ - mkl-service \ - numpy=1.20 \ - pip=21 \ - scipy=1.6 \ - setuptools \ - setuptools_scm \ - toml \ - zlib; sync && \ - chmod -R a+rX /usr/local/miniconda; sync && \ - chmod +x /usr/local/miniconda/bin/*; sync && \ - conda build purge-all; sync && \ - conda clean -tipsy && sync + FSLLOCKDIR="" \ + FSLMACHINELIST="" \ + FSLREMOTECALL="" \ + FSLGECUDAQ="cuda.q" # Unless otherwise specified each process should only use one thread - nipype # will handle parallelization ENV MKL_NUM_THREADS=1 \ OMP_NUM_THREADS=1 -# Create a shared $HOME directory -RUN useradd -m -s /bin/bash -G users neuro -WORKDIR /home/neuro -ENV HOME="/home/neuro" - # Install package # CRITICAL: Make sure python setup.py --version has been run at least once # outside the container, with access to the git history. -COPY . /src/nitransforms -RUN pip install --no-cache-dir "/src/nitransforms[all]" +COPY --from=src /src/dist/*.whl . 
+RUN python -m pip install --no-cache-dir $( ls *.whl )[all] -RUN find $HOME -type d -exec chmod go=u {} + && \ - find $HOME -type f -exec chmod go=u {} + +RUN find $HOME -type d -exec chmod go=u {} + && \ + find $HOME -type f -exec chmod go=u {} + && \ + rm -rf $HOME/.npm $HOME/.conda $HOME/.empty RUN ldconfig WORKDIR /tmp/ diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 1d115b13..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -recursive-exclude .circleci/ * -recursive-exclude .github/ * -recursive-exclude docker/ * -recursive-exclude docs/ * -recursive-exclude joss/ * -recursive-exclude nitransforms/tests * -exclude .codecov.yml .coveragerc .gitignore .pep8speaks.yml .travis.yml Dockerfile \ No newline at end of file diff --git a/README.md b/README.md index b5a45349..4ae2340f 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # NiTransforms [![DOI](https://joss.theoj.org/papers/10.21105/joss.03459/status.svg)](https://doi.org/10.21105/joss.03459) [![ISBI2020](https://img.shields.io/badge/doi-10.31219%2Fosf.io%2F8aq7b-blue.svg)](https://doi.org/10.31219/osf.io/8aq7b) -[![Deps & CI](https://github.com/nipy/nitransforms/actions/workflows/travis.yml/badge.svg)](https://github.com/nipy/nitransforms/actions/workflows/travis.yml) +[![Deps & CI](https://github.com/nipy/nitransforms/actions/workflows/tox.yml/badge.svg)](https://github.com/nipy/nitransforms/actions/workflows/tox.yml) [![CircleCI](https://circleci.com/gh/nipy/nitransforms.svg?style=svg)](https://circleci.com/gh/nipy/nitransforms) [![codecov](https://codecov.io/gh/nipy/nitransforms/branch/master/graph/badge.svg)](https://codecov.io/gh/nipy/nitransforms) [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/nipy/nitransforms/master?filepath=docs%2Fnotebooks%2F) diff --git a/docker/files/freesurfer7.3.2-exclude.txt b/docker/files/freesurfer7.3.2-exclude.txt new file mode 100644 index 00000000..3b07a64e --- /dev/null +++ 
b/docker/files/freesurfer7.3.2-exclude.txt @@ -0,0 +1,868 @@ +freesurfer/average/711-2B_as_mni_average_305.4dfp.hdr +freesurfer/average/711-2B_as_mni_average_305.4dfp.ifh +freesurfer/average/711-2B_as_mni_average_305.4dfp.img +freesurfer/average/711-2B_as_mni_average_305.4dfp.img.rec +freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.hdr +freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.img.rec +freesurfer/average/711-2C_as_mni_average_305.4dfp.hdr +freesurfer/average/711-2C_as_mni_average_305.4dfp.img.rec +freesurfer/average/711-2C_as_mni_average_305.4dfp.mat +freesurfer/average/aseg+spmhead+vermis+pons.ixi.gca +freesurfer/average/BrainstemSS +freesurfer/average/Buckner_JNeurophysiol11_MNI152 +freesurfer/average/Choi_JNeurophysiol12_MNI152 +freesurfer/average/colortable_desikan_killiany.txt +freesurfer/average/face.gca +freesurfer/average/HippoSF +freesurfer/average/label_scales.dat +freesurfer/average/lh.atlas2002_simple.gcs +freesurfer/average/lh.atlas2005_simple.gcs +freesurfer/average/lh.average.curvature.filled.buckner40.tif +freesurfer/average/lh.average.CURVATURE.tif +freesurfer/average/lh.average.tif +freesurfer/average/lh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs +freesurfer/average/lh.destrieux.simple.2009-07-29.gcs +freesurfer/average/lh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs +freesurfer/average/lh.DKTatlas100.gcs +freesurfer/average/lh.DKTatlas40.gcs +freesurfer/average/lh_trans_toSulc.gcs +freesurfer/average/mideface-atlas +freesurfer/average/mni152.mni305.cor.subfov1.dat +freesurfer/average/mni152.mni305.cor.subfov2.dat +freesurfer/average/mni152.register.dat +freesurfer/average/mni305.cor.readme +freesurfer/average/mni305.cor.subfov1.mgz +freesurfer/average/mni305.cor.subfov1.reg +freesurfer/average/mni305.cor.subfov2.mgz +freesurfer/average/mni305.cor.subfov2.reg +freesurfer/average/mni305.mask.cor.mgz +freesurfer/average/mni_average_305.4dfp.hdr +freesurfer/average/mni_average_305.4dfp.ifh 
+freesurfer/average/mni_average_305.4dfp.img +freesurfer/average/mni_average_305.4dfp.img.rec +freesurfer/average/mult-comp-cor +freesurfer/average/pons.mni152.2mm.mgz +freesurfer/average/RB_all_2008-03-26.mni152.2mm.lta +freesurfer/average/RB_all_2016-05-10.vc700.gca +freesurfer/average/RB_all_2019_10_25.talxfm.mni305.gca +freesurfer/average/RB_all_withskull_2016-05-10.vc700.gca +freesurfer/average/RB_all_withskull_2019_10_22.talxfm.mni305.gca +freesurfer/average/rh.atlas2002_simple.gcs +freesurfer/average/rh.atlas2005_simple.gcs +freesurfer/average/rh.average.curvature.filled.buckner40.tif +freesurfer/average/rh.average.CURVATURE.tif +freesurfer/average/rh.average.tif +freesurfer/average/rh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs +freesurfer/average/rh.destrieux.simple.2009-07-29.gcs +freesurfer/average/rh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs +freesurfer/average/rh.DKTatlas100.gcs +freesurfer/average/rh.DKTatlas40.gcs +freesurfer/average/rh_trans_toSulc.gcs +freesurfer/average/RLB700_atlas_as_orig.4dfp.hdr +freesurfer/average/RLB700_atlas_as_orig.4dfp.ifh +freesurfer/average/RLB700_atlas_as_orig.4dfp.img +freesurfer/average/RLB700_atlas_as_orig.4dfp.img.rec +freesurfer/average/samseg +freesurfer/average/surf +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img.rec 
+freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.hdr +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.ifh +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.hdr +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.ifh +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img.rec +freesurfer/average/talairach_mixed_with_skull.gca +freesurfer/average/ThalamicNuclei +freesurfer/average/tissue_parms.txt +freesurfer/average/wmsa_new_eesmith.gca +freesurfer/average/Yeo_Brainmap_MNI152 +freesurfer/average/Yeo_JNeurophysiol11_MNI152 +freesurfer/bin/3dvolreg.afni +freesurfer/bin/4dfptoanalyze +freesurfer/bin/anatomiCutsUtils +freesurfer/bin/annot2std +freesurfer/bin/aparc2feat +freesurfer/bin/aparcstats2table +freesurfer/bin/aparc_stats_aseg +freesurfer/bin/aparcstatsdiff +freesurfer/bin/apas2aseg +freesurfer/bin/applyMorph +freesurfer/bin/aseg2feat +freesurfer/bin/asegstats2table +freesurfer/bin/asegstatsdiff +freesurfer/bin/bblabel +freesurfer/bin/bbmask +freesurfer/bin/bedpostx_mgh +freesurfer/bin/beta2sxa +freesurfer/bin/bet.fsl +freesurfer/bin/biasfield +freesurfer/bin/bmedits2surf +freesurfer/bin/brec +freesurfer/bin/brec.awk +freesurfer/bin/browse-minc-header.tcl +freesurfer/bin/bugr +freesurfer/bin/build_desikan_killiany_gcs.csh +freesurfer/bin/cblumwmgyri +freesurfer/bin/checkMCR.sh 
+freesurfer/bin/check_recons.sh +freesurfer/bin/check_siemens_dir +freesurfer/bin/check_subject +freesurfer/bin/clear_fs_env.csh +freesurfer/bin/compute_interrater_variability.csh +freesurfer/bin/compute_label_vals.csh +freesurfer/bin/compute_label_volumes.csh +freesurfer/bin/connectgraph +freesurfer/bin/cor_to_minc +freesurfer/bin/cp-dicom +freesurfer/bin/createMorph +freesurfer/bin/csvprint +freesurfer/bin/dcmdir-info-mgh +freesurfer/bin/dcmdjpeg.fs +freesurfer/bin/dcmdrle.fs +freesurfer/bin/dcmsplit +freesurfer/bin/dcmunpack +freesurfer/bin/deface_subject +freesurfer/bin/defect-seg +freesurfer/bin/dicom-rename +freesurfer/bin/diffusionUtils +freesurfer/bin/dmri_ac.sh +freesurfer/bin/dmri_AnatomiCuts +freesurfer/bin/dmri_bset +freesurfer/bin/dmri_coloredFA +freesurfer/bin/dmri_extractSurfaceMeasurements +freesurfer/bin/dmri_forrest +freesurfer/bin/dmri_group +freesurfer/bin/dmri_groupByEndpoints +freesurfer/bin/dmri_match +freesurfer/bin/dmri_mergepaths +freesurfer/bin/dmri_motion +freesurfer/bin/dmri_neighboringRegions +freesurfer/bin/dmri_paths +freesurfer/bin/dmri_pathstats +freesurfer/bin/dmri_projectEndPoints +freesurfer/bin/dmrirc.example +freesurfer/bin/dmrirc.long.example +freesurfer/bin/dmrirc.long.multiscan.example +freesurfer/bin/dmrirc.multiscan.example +freesurfer/bin/dmri_saveHistograms +freesurfer/bin/dmri_spline +freesurfer/bin/dmri_stats_ac +freesurfer/bin/dmri_tensoreig +freesurfer/bin/dmri_train +freesurfer/bin/dmri_trk2trk +freesurfer/bin/dmri_violinPlots +freesurfer/bin/dmri_vox2vox +freesurfer/bin/dt_recon +freesurfer/bin/epidewarp.fsl +freesurfer/bin/exportGcam +freesurfer/bin/feat2segstats +freesurfer/bin/feat2surf +freesurfer/bin/findsession +freesurfer/bin/fix_subject +freesurfer/bin/fix_subject_corrected +freesurfer/bin/fix_subject_corrected-lh +freesurfer/bin/fix_subject_corrected-rh +freesurfer/bin/fix_subject-lh +freesurfer/bin/fix_subject_on_seychelles +freesurfer/bin/fix_subject-rh +freesurfer/bin/fixup_mni_paths 
+freesurfer/bin/flip_4dfp +freesurfer/bin/flirt.fsl +freesurfer/bin/flirt.newdefault.20080811.sch +freesurfer/bin/fname2ext +freesurfer/bin/fname2stem +freesurfer/bin/freesurfer +freesurfer/bin/freeview +freesurfer/bin/fscalc +freesurfer/bin/fscalc.fsl +freesurfer/bin/fsdcmdecompress +freesurfer/bin/fsfget +freesurfer/bin/fsfirst.fsl +freesurfer/bin/fs_install_mcr +freesurfer/bin/fsl.5.0.2.xyztrans.sch +freesurfer/bin/fs_lib_check +freesurfer/bin/fsl_label2voxel +freesurfer/bin/fslmaths.fsl +freesurfer/bin/fslorient.fsl +freesurfer/bin/fslregister +freesurfer/bin/fsl_rigid_register +freesurfer/bin/fsl_sub_mgh +freesurfer/bin/fslswapdim.fsl +freesurfer/bin/fspalm +freesurfer/bin/fsPrintHelp +freesurfer/bin/fsr-coreg +freesurfer/bin/fsr-import +freesurfer/bin/fs_run_from_mcr +freesurfer/bin/fs_spmreg.glnxa64 +freesurfer/bin/fs_tutorial_data +freesurfer/bin/fs_update +freesurfer/bin/fsvglrun +freesurfer/bin/fvcompare +freesurfer/bin/gca-apply +freesurfer/bin/gcainit +freesurfer/bin/gcaprepone +freesurfer/bin/gcatrain +freesurfer/bin/gcatrainskull +freesurfer/bin/gdcmconv.fs +freesurfer/bin/gems_compute_binary_atlas_probs +freesurfer/bin/get_label_thickness +freesurfer/bin/groupstats +freesurfer/bin/groupstatsdiff +freesurfer/bin/gtmseg +freesurfer/bin/help_xml_validate +freesurfer/bin/hiam_make_surfaces +freesurfer/bin/hiam_make_template +freesurfer/bin/hiam_register +freesurfer/bin/histo_compute_joint_density +freesurfer/bin/histo_fix_topology +freesurfer/bin/histo_register_block +freesurfer/bin/histo_segment +freesurfer/bin/histo_synthesize +freesurfer/bin/ico_supersample +freesurfer/bin/id.xfm +freesurfer/bin/inflate_subject +freesurfer/bin/inflate_subject3 +freesurfer/bin/inflate_subject-lh +freesurfer/bin/inflate_subject_new +freesurfer/bin/inflate_subject_new-lh +freesurfer/bin/inflate_subject_new-rh +freesurfer/bin/inflate_subject-rh +freesurfer/bin/inflate_subject_sc +freesurfer/bin/irepifitvol +freesurfer/bin/irepifitvol.glnx64 +freesurfer/bin/isanalyze 
+freesurfer/bin/isnifti +freesurfer/bin/isolate_labels.csh +freesurfer/bin/isolate_labels_keeporigval.csh +freesurfer/bin/is-surface +freesurfer/bin/jkgcatrain +freesurfer/bin/label2flat +freesurfer/bin/label2patch +freesurfer/bin/label_area +freesurfer/bin/label_border +freesurfer/bin/label_child +freesurfer/bin/label_elderly_subject +freesurfer/bin/labels_disjoint +freesurfer/bin/labels_intersect +freesurfer/bin/label_subject +freesurfer/bin/label_subject_flash +freesurfer/bin/label_subject_mixed +freesurfer/bin/labels_union +freesurfer/bin/list_otl_labels +freesurfer/bin/listsubj +freesurfer/bin/long_create_base_sigma +freesurfer/bin/long_create_orig +freesurfer/bin/longmc +freesurfer/bin/long_mris_slopes +freesurfer/bin/long_qdec_table +freesurfer/bin/long_stats_combine +freesurfer/bin/long_stats_slopes +freesurfer/bin/long_stats_tps +freesurfer/bin/long_submit_jobs +freesurfer/bin/long_submit_postproc +freesurfer/bin/lpcregister +freesurfer/bin/lta_diff +freesurfer/bin/make_average_subcort +freesurfer/bin/make_average_subject +freesurfer/bin/make_average_surface +freesurfer/bin/make_average_volume +freesurfer/bin/make_cortex_label +freesurfer/bin/make_exvivo_filled +freesurfer/bin/make_folding_atlas +freesurfer/bin/make_hemi_mask +freesurfer/bin/make-segvol-table +freesurfer/bin/make_symmetric +freesurfer/bin/make_upright +freesurfer/bin/makevol +freesurfer/bin/map_all_labels +freesurfer/bin/map_all_labels-lh +freesurfer/bin/map_central_sulcus +freesurfer/bin/map_to_base +freesurfer/bin/meanval +freesurfer/bin/mergeseg +freesurfer/bin/merge_stats_tables +freesurfer/bin/minc2seqinfo +freesurfer/bin/mkheadsurf +freesurfer/bin/mkima_index.tcl +freesurfer/bin/mkmnc_index.tcl +freesurfer/bin/mksubjdirs +freesurfer/bin/mksurfatlas +freesurfer/bin/mkxsubjreg +freesurfer/bin/mni152reg +freesurfer/bin/morph_only_subject +freesurfer/bin/morph_only_subject-lh +freesurfer/bin/morph_only_subject-rh +freesurfer/bin/morph_rgb-lh +freesurfer/bin/morph_rgb-rh 
+freesurfer/bin/morph_subject +freesurfer/bin/morph_subject-lh +freesurfer/bin/morph_subject_on_seychelles +freesurfer/bin/morph_subject-rh +freesurfer/bin/morph_tables-lh +freesurfer/bin/morph_tables-rh +freesurfer/bin/mri_align_long.csh +freesurfer/bin/mri_aparc2wmseg +freesurfer/bin/mri_apply_autoencoder +freesurfer/bin/mri_apply_bias +freesurfer/bin/mri_apply_inu_correction +freesurfer/bin/mri_aseg_edit_reclassify +freesurfer/bin/mri_aseg_edit_train +freesurfer/bin/mri_auto_fill +freesurfer/bin/mri_average +freesurfer/bin/mri_bc_sc_bias_correct +freesurfer/bin/mri_brain_volume +freesurfer/bin/mri_build_priors +freesurfer/bin/mri_cal_renormalize_gca +freesurfer/bin/mri_ca_tissue_parms +freesurfer/bin/mri_ca_train +freesurfer/bin/mri_cht2p +freesurfer/bin/mri_classify +freesurfer/bin/mri_cnr +freesurfer/bin/mri_compute_bias +freesurfer/bin/mri_compute_change_map +freesurfer/bin/mri_compute_distances +freesurfer/bin/mri_compute_layer_fractions +freesurfer/bin/mri_compute_structure_transforms +freesurfer/bin/mri_compute_volume_fractions +freesurfer/bin/mri_compute_volume_intensities +freesurfer/bin/mri_concatenate_gcam +freesurfer/bin/mri_convert_mdh +freesurfer/bin/mri_copy_params +freesurfer/bin/mri_copy_values +freesurfer/bin/mri_cor2label +freesurfer/bin/mri_correct_segmentations +freesurfer/bin/mri_create_t2combined +freesurfer/bin/mri_create_tests +freesurfer/bin/mri_cvs_check +freesurfer/bin/mri_cvs_data_copy +freesurfer/bin/mri_cvs_register +freesurfer/bin/mri_cvs_requiredfiles.txt +freesurfer/bin/mri_dct_align +freesurfer/bin/mri_dct_align_binary +freesurfer/bin/mri_distance_transform +freesurfer/bin/mri_dist_surf_label +freesurfer/bin/mri_divide_segmentation +freesurfer/bin/mri_edit_segmentation +freesurfer/bin/mri_edit_segmentation_with_surfaces +freesurfer/bin/mri_elastic_energy +freesurfer/bin/mri_estimate_tissue_parms +freesurfer/bin/mri_evaluate_morph +freesurfer/bin/mri_extract +freesurfer/bin/mri_extract_conditions 
+freesurfer/bin/mri_extract_fcd_features +freesurfer/bin/mri_extract_label +freesurfer/bin/mri_extract_largest_CC +freesurfer/bin/mri_fcili +freesurfer/bin/mri_fdr +freesurfer/bin/mri_fieldsign +freesurfer/bin/mri_fit_bias +freesurfer/bin/mri_fslmat_to_lta +freesurfer/bin/mri-func2sph +freesurfer/bin/mri-funcvits +freesurfer/bin/mri_fuse_intensity_images +freesurfer/bin/mri_gca_ambiguous +freesurfer/bin/mri_gcab_train +freesurfer/bin/mri_gdfglm +freesurfer/bin/mri_glmfit +freesurfer/bin/mri_glmfit-sim +freesurfer/bin/mri_gradient_info +freesurfer/bin/mri_gradunwarp +freesurfer/bin/mri_gtmpvc +freesurfer/bin/mri_gtmseg +freesurfer/bin/mri_hausdorff_dist +freesurfer/bin/mri_head +freesurfer/bin/mri_hires_register +freesurfer/bin/mri_histo_eq +freesurfer/bin/mri_histo_normalize +freesurfer/bin/mri_ibmc +freesurfer/bin/mri_interpolate +freesurfer/bin/mri_jacobian +freesurfer/bin/mri_joint_density +freesurfer/bin/mri_label_accuracy +freesurfer/bin/mri_label_histo +freesurfer/bin/mri_label_vals +freesurfer/bin/mri_label_volume +freesurfer/bin/mri_linear_align +freesurfer/bin/mri_linear_align_binary +freesurfer/bin/mri_linear_register +freesurfer/bin/mri_long_normalize +freesurfer/bin/mri_make_bem_surfaces +freesurfer/bin/mri_make_density_map +freesurfer/bin/mri_make_labels +freesurfer/bin/mri_make_register +freesurfer/bin/mri_make_template +freesurfer/bin/mri_map_cpdat +freesurfer/bin/mri_maps2csd +freesurfer/bin/mri_mark_temporal_lobe +freesurfer/bin/mri_mc +freesurfer/bin/mri_mcsim +freesurfer/bin/mri_mergelabels +freesurfer/bin/mri_mi +freesurfer/bin/mri_modify +freesurfer/bin/mri_morphology +freesurfer/bin/mri_mosaic +freesurfer/bin/mri_motion_correct +freesurfer/bin/mri_motion_correct2 +freesurfer/bin/mri_ms_EM +freesurfer/bin/mri_ms_EM_with_atlas +freesurfer/bin/mri_ms_fitparms +freesurfer/bin/mri_ms_LDA +freesurfer/bin/mri_multiscale_segment +freesurfer/bin/mri_multispectral_segment +freesurfer/bin/mri_nl_align +freesurfer/bin/mri_nl_align_binary 
+freesurfer/bin/mri_nlfilter +freesurfer/bin/mri_paint +freesurfer/bin/mri_parselabel +freesurfer/bin/mri_parse_sdcmdir +freesurfer/bin/mri_partial_ribbon +freesurfer/bin/mri_path2label +freesurfer/bin/mri_polv +freesurfer/bin/mri_probedicom +freesurfer/bin/mri_probe_ima +freesurfer/bin/mri_reduce +freesurfer/bin/mri_refine_seg +freesurfer/bin/mri_register +freesurfer/bin/mri_reorient_LR.csh +freesurfer/bin/mri_rf_label +freesurfer/bin/mri_rf_long_label +freesurfer/bin/mri_rf_long_train +freesurfer/bin/mri_rf_train +freesurfer/bin/mri_ribbon +freesurfer/bin/mri_rigid_register +freesurfer/bin/mris2rgb +freesurfer/bin/mris_AA_shrinkwrap +freesurfer/bin/mris_add_template +freesurfer/bin/mris_annot_diff +freesurfer/bin/mris_annot_to_segmentation +freesurfer/bin/mris_aseg_distance +freesurfer/bin/mris_average_curvature +freesurfer/bin/mris_average_parcellation +freesurfer/bin/mris_BA_segment +freesurfer/bin/mri_sbbr +freesurfer/bin/mris_ca_deform +freesurfer/bin/mris_ca_train +freesurfer/bin/mris_classify_thickness +freesurfer/bin/mris_compute_acorr +freesurfer/bin/mris_compute_layer_intensities +freesurfer/bin/mris_compute_lgi +freesurfer/bin/mris_compute_optimal_kernel +freesurfer/bin/mris_compute_overlap +freesurfer/bin/mris_compute_parc_overlap +freesurfer/bin/mris_compute_volume_fractions +freesurfer/bin/mris_congeal +freesurfer/bin/mris_copy_header +freesurfer/bin/mris_curvature2image +freesurfer/bin/mris_deform +freesurfer/bin/mris_density +freesurfer/bin/mris_distance_map +freesurfer/bin/mris_distance_to_label +freesurfer/bin/mris_distance_transform +freesurfer/bin/mri_segcentroids +freesurfer/bin/mri_seghead +freesurfer/bin/mri_segment_hypothalamic_subunits +freesurfer/bin/mri_segment_tumor +freesurfer/bin/mri_segment_wm_damage +freesurfer/bin/mri_seg_overlap +freesurfer/bin/mris_entropy +freesurfer/bin/mris_errors +freesurfer/bin/mris_extract_patches +freesurfer/bin/mris_extract_values +freesurfer/bin/mris_exvivo_surfaces +freesurfer/bin/mris_fbirn_annot 
+freesurfer/bin/mris_fill +freesurfer/bin/mris_find_flat_regions +freesurfer/bin/mris_flatten +freesurfer/bin/mris_fwhm +freesurfer/bin/mris_gradient +freesurfer/bin/mris_hausdorff_dist +freesurfer/bin/mris_image2vtk +freesurfer/bin/mri_simulate_atrophy +freesurfer/bin/mris_info +freesurfer/bin/mris_init_global_tractography +freesurfer/bin/mris_intensity_profile +freesurfer/bin/mris_interpolate_warp +freesurfer/bin/mris_label_area +freesurfer/bin/mris_label_calc +freesurfer/bin/mris_label_mode +freesurfer/bin/mris_longitudinal_surfaces +freesurfer/bin/mris_make_average_surface +freesurfer/bin/mris_make_face_parcellation +freesurfer/bin/mris_make_map_surfaces +freesurfer/bin/mris_make_surfaces +freesurfer/bin/mris_make_template +freesurfer/bin/mris_map_cuts +freesurfer/bin/mris_mef_surfaces +freesurfer/bin/mris_merge_parcellations +freesurfer/bin/mris_mesh_subdivide +freesurfer/bin/mris_morph_stats +freesurfer/bin/mris_ms_refine +freesurfer/bin/mris_ms_surface_CNR +freesurfer/bin/mris_multimodal +freesurfer/bin/mris_multimodal_surface_placement +freesurfer/bin/mris_multiscale_stats +freesurfer/bin/mris_niters2fwhm +freesurfer/bin/mris_nudge +freesurfer/bin/mris_parcellate_connectivity +freesurfer/bin/mri-sph2surf +freesurfer/bin/mris_pmake +freesurfer/bin/mris_preproc +freesurfer/bin/mris_profileClustering +freesurfer/bin/mrisp_write +freesurfer/bin/mris_refine_surfaces +freesurfer/bin/mris_register_label_map +freesurfer/bin/mris_register_to_label +freesurfer/bin/mris_register_to_volume +freesurfer/bin/mris_remove_negative_vertices +freesurfer/bin/mris_remove_variance +freesurfer/bin/mris_resample +freesurfer/bin/mris_rescale +freesurfer/bin/mris_reverse +freesurfer/bin/mris_rf_label +freesurfer/bin/mris_rf_train +freesurfer/bin/mris_rotate +freesurfer/bin/mris_sample_label +freesurfer/bin/mris_sample_parc +freesurfer/bin/mris_seg2annot +freesurfer/bin/mris_segment +freesurfer/bin/mris_segmentation_stats +freesurfer/bin/mris_segment_vals 
+freesurfer/bin/mris_shrinkwrap +freesurfer/bin/mris_simulate_atrophy +freesurfer/bin/mris_smooth_intracortical +freesurfer/bin/mris_surf2vtk +freesurfer/bin/mris_surface_change +freesurfer/bin/mris_surface_to_vol_distances +freesurfer/bin/mris_svm_classify +freesurfer/bin/mris_svm_train +freesurfer/bin/mris_talairach +freesurfer/bin/mris_thickness_comparison +freesurfer/bin/mris_transform +freesurfer/bin/mris_translate_annotation +freesurfer/bin/mris_transmantle_dysplasia_paths +freesurfer/bin/mri_strip_nonwhite +freesurfer/bin/mri_strip_subject_info +freesurfer/bin/mris_twoclass +freesurfer/bin/mri_surfacemask +freesurfer/bin/mris_volmask_novtk +freesurfer/bin/mris_volmask_vtk +freesurfer/bin/mris_volsmooth +freesurfer/bin/mris_volume +freesurfer/bin/mris_warp +freesurfer/bin/mris_wm_volume +freesurfer/bin/mris_w_to_curv +freesurfer/bin/mri_synthesize +freesurfer/bin/mri_synthstrip +freesurfer/bin/mri_threshold +freesurfer/bin/mri_topologycorrection +freesurfer/bin/mri_train +freesurfer/bin/mri_train_autoencoder +freesurfer/bin/mri_transform +freesurfer/bin/mri_transform_to_COR +freesurfer/bin/mri_twoclass +freesurfer/bin/mri_update_gca +freesurfer/bin/mri_validate_skull_stripped +freesurfer/bin/mri_vessel_segment +freesurfer/bin/mri_vol2label +freesurfer/bin/mri_vol2roi +freesurfer/bin/mri_volcluster +freesurfer/bin/mri_volsynth +freesurfer/bin/mri_warp_convert +freesurfer/bin/mri_wbc +freesurfer/bin/mri_wmfilter +freesurfer/bin/mri_xcorr +freesurfer/bin/mri_xvolavg +freesurfer/bin/mri_z2p +freesurfer/bin/ms_refine_subject +freesurfer/bin/nmovie_qt +freesurfer/bin/oct_register_mosaic +freesurfer/bin/oct_rf_train +freesurfer/bin/oct_train +freesurfer/bin/optseq2 +freesurfer/bin/orientLAS +freesurfer/bin/parc_atlas_jackknife_test +freesurfer/bin/plot_structure_stats.tcl +freesurfer/bin/polyorder +freesurfer/bin/predict_v1.sh +freesurfer/bin/print_unique_labels.csh +freesurfer/bin/progressbar.tcl +freesurfer/bin/qatools.py +freesurfer/bin/qdec 
+freesurfer/bin/qdec_glmfit +freesurfer/bin/qt.conf +freesurfer/bin/quantifyBrainstemStructures.sh +freesurfer/bin/quantifyHAsubregions.sh +freesurfer/bin/quantifyThalamicNuclei.sh +freesurfer/bin/rbbr +freesurfer/bin/rbftest +freesurfer/bin/rcbf-prep +freesurfer/bin/rebuild_gca_atlas.csh +freesurfer/bin/recon-all-exvivo +freesurfer/bin/recon-all.makefile +freesurfer/bin/regdat2xfm +freesurfer/bin/reg-feat2anat +freesurfer/bin/register_child +freesurfer/bin/register.csh +freesurfer/bin/register_elderly_subject +freesurfer/bin/register_subject +freesurfer/bin/register_subject_flash +freesurfer/bin/register_subject_mixed +freesurfer/bin/reg-mni305.2mm +freesurfer/bin/reinflate_subject +freesurfer/bin/reinflate_subject-lh +freesurfer/bin/reinflate_subject-rh +freesurfer/bin/remove_talairach +freesurfer/bin/renormalize_subject +freesurfer/bin/renormalize_subject_keep_editting +freesurfer/bin/renormalize_T1_subject +freesurfer/bin/repair_siemens_file +freesurfer/bin/reregister_subject_mixed +freesurfer/bin/rtview +freesurfer/bin/run_mris_preproc +freesurfer/bin/run-qdec-glm +freesurfer/bin/run_samseg_long +freesurfer/bin/run_SegmentSubfieldsT1Longitudinal.sh +freesurfer/bin/run_SegmentSubject.sh +freesurfer/bin/run_segmentSubjectT1_autoEstimateAlveusML.sh +freesurfer/bin/run_segmentSubjectT1T2_autoEstimateAlveusML.sh +freesurfer/bin/run_segmentSubjectT2_autoEstimateAlveusML.sh +freesurfer/bin/run_SegmentThalamicNuclei.sh +freesurfer/bin/samseg +freesurfer/bin/samseg2recon +freesurfer/bin/samseg-long +freesurfer/bin/sbtiv +freesurfer/bin/seg2filled +freesurfer/bin/segmentBS.sh +freesurfer/bin/segmentHA_T1_long.sh +freesurfer/bin/segmentHA_T1.sh +freesurfer/bin/segmentHA_T2.sh +freesurfer/bin/segment_monkey +freesurfer/bin/SegmentSubfieldsT1Longitudinal +freesurfer/bin/segment_subject +freesurfer/bin/segmentSubject +freesurfer/bin/segment_subject_notal +freesurfer/bin/segment_subject_notal2 +freesurfer/bin/segment_subject_old_skull_strip +freesurfer/bin/segment_subject_sc 
+freesurfer/bin/segmentSubjectT1_autoEstimateAlveusML +freesurfer/bin/segmentSubjectT1T2_autoEstimateAlveusML +freesurfer/bin/segmentSubjectT2_autoEstimateAlveusML +freesurfer/bin/segment_subject_talmgh +freesurfer/bin/SegmentThalamicNuclei +freesurfer/bin/segmentThalamicNuclei.sh +freesurfer/bin/segpons +freesurfer/bin/setlabelstat +freesurfer/bin/sfa2fieldsign +freesurfer/bin/show_tal +freesurfer/bin/skip_long_make_checks +freesurfer/bin/slicedelay +freesurfer/bin/slicetimer.fsl +freesurfer/bin/sphere_subject +freesurfer/bin/sphere_subject-lh +freesurfer/bin/sphere_subject-rh +freesurfer/bin/spherical_st +freesurfer/bin/Spline3_test +freesurfer/bin/spmmat2register +freesurfer/bin/spmregister +freesurfer/bin/spm_t_to_b +freesurfer/bin/sratio +freesurfer/bin/stat_normalize +freesurfer/bin/stattablediff +freesurfer/bin/stem2fname +freesurfer/bin/stim_polar +freesurfer/bin/streamlineFilter +freesurfer/bin/surf2vol +freesurfer/bin/surfreg +freesurfer/bin/swi_preprocess +freesurfer/bin/swi_process +freesurfer/bin/t4img_4dfp +freesurfer/bin/t4imgs_4dfp +freesurfer/bin/talairach2 +freesurfer/bin/talairach_mgh +freesurfer/bin/tal_compare +freesurfer/bin/tal_QC_AZS +freesurfer/bin/talsegprob +freesurfer/bin/template +freesurfer/bin/testOrientationPlanesFromParcellation +freesurfer/bin/test_recon-all.csh +freesurfer/bin/test_tutorials.sh +freesurfer/bin/thickdiffmap +freesurfer/bin/tkmedit +freesurfer/bin/tkmeditfv +freesurfer/bin/tkregister2 +freesurfer/bin/tkregisterfv +freesurfer/bin/tksurfer +freesurfer/bin/tksurferfv +freesurfer/bin/trac-all +freesurfer/bin/trac-paths +freesurfer/bin/trac-preproc +freesurfer/bin/tractstats2table +freesurfer/bin/train-gcs-atlas +freesurfer/bin/tridec +freesurfer/bin/trk_tools +freesurfer/bin/unpack_ima1.tcl +freesurfer/bin/unpackimadir +freesurfer/bin/unpackimadir2 +freesurfer/bin/unpack_ima.tcl +freesurfer/bin/unpackmincdir +freesurfer/bin/unpack_mnc.tcl +freesurfer/bin/unpacksdcmdir +freesurfer/bin/usbtree +freesurfer/bin/vol2segavg 
+freesurfer/bin/vol2subfield +freesurfer/bin/vol2symsurf +freesurfer/bin/vsm-smooth +freesurfer/bin/wfilemask +freesurfer/bin/wm-anat-snr +freesurfer/bin/wmedits2surf +freesurfer/bin/wmsaseg +freesurfer/bin/xcerebralseg +freesurfer/bin/xcorr +freesurfer/bin/xfmrot +freesurfer/bin/xhemireg +freesurfer/bin/xhemi-tal +freesurfer/bin/xsanatreg +freesurfer/bin/zero_lt_4dfp +freesurfer/DefectLUT.txt +freesurfer/diffusion +freesurfer/docs/xml +freesurfer/FreeSurferEnv.csh +freesurfer/FreeSurferEnv.sh +freesurfer/fsfast +freesurfer/lib/bem/ic0.tri +freesurfer/lib/bem/ic1.tri +freesurfer/lib/bem/ic2.tri +freesurfer/lib/bem/ic3.tri +freesurfer/lib/bem/ic6.tri +freesurfer/lib/bem/inner_skull.dat +freesurfer/lib/bem/outer_skin.dat +freesurfer/lib/bem/outer_skull.dat +freesurfer/lib/images +freesurfer/lib/qt +freesurfer/lib/resource +freesurfer/lib/tcl +freesurfer/lib/tktools +freesurfer/lib/vtk +freesurfer/matlab +freesurfer/mni-1.4 +freesurfer/mni/bin/correct_field +freesurfer/mni/bin/crispify +freesurfer/mni/bin/dcm2mnc +freesurfer/mni/bin/Display +freesurfer/mni/bin/ecattominc +freesurfer/mni/bin/evaluate_field +freesurfer/mni/bin/extracttag +freesurfer/mni/bin/field2imp +freesurfer/mni/bin/imp2field +freesurfer/mni/bin/invert_raw_image +freesurfer/mni/bin/make_model +freesurfer/mni/bin/make_phantom +freesurfer/mni/bin/make_template +freesurfer/mni/bin/mincaverage +freesurfer/mni/bin/mincbbox +freesurfer/mni/bin/minccalc +freesurfer/mni/bin/mincchamfer +freesurfer/mni/bin/mincconcat +freesurfer/mni/bin/minccopy +freesurfer/mni/bin/mincdiff +freesurfer/mni/bin/mincedit +freesurfer/mni/bin/mincexpand +freesurfer/mni/bin/mincextract +freesurfer/mni/bin/mincheader +freesurfer/mni/bin/minchistory +freesurfer/mni/bin/minclookup +freesurfer/mni/bin/mincmakescalar +freesurfer/mni/bin/mincmakevector +freesurfer/mni/bin/mincmath +freesurfer/mni/bin/minc_modify_header +freesurfer/mni/bin/mincpik +freesurfer/mni/bin/mincreshape +freesurfer/mni/bin/mincstats 
+freesurfer/mni/bin/minctoecat +freesurfer/mni/bin/minctoraw +freesurfer/mni/bin/mincview +freesurfer/mni/bin/mincwindow +freesurfer/mni/bin/mnc2nii +freesurfer/mni/bin/mritoself +freesurfer/mni/bin/ncdump +freesurfer/mni/bin/ncgen +freesurfer/mni/bin/nii2mnc +freesurfer/mni/bin/nu_estimate +freesurfer/mni/bin/nu_estimate_np_and_em~ +freesurfer/mni/bin/nu_evaluate +freesurfer/mni/bin/param2xfm +freesurfer/mni/bin/rand_param +freesurfer/mni/bin/rawtominc +freesurfer/mni/bin/register +freesurfer/mni/bin/resample_labels +freesurfer/mni/bin/sharpen_hist +freesurfer/mni/bin/sharpen_volume +freesurfer/mni/bin/spline_smooth +freesurfer/mni/bin/transformtags +freesurfer/mni/bin/upet2mnc +freesurfer/mni/bin/volume_hist +freesurfer/mni/bin/volume_stats +freesurfer/mni/bin/voxeltoworld +freesurfer/mni/bin/worldtovoxel +freesurfer/mni/bin/xcorr_vol +freesurfer/mni/bin/xfm2param +freesurfer/mni/bin/xfmconcat +freesurfer/mni/bin/xfminvert +freesurfer/mni/bin/xfmtool +freesurfer/mni/bin/zscore_vol +freesurfer/mni/data +freesurfer/mni/include +freesurfer/mni/mni.srcbuild.June2015.tgz +freesurfer/mni/share/man +freesurfer/mni/share/N3 +freesurfer/models +freesurfer/python/lib/python3.8/test +freesurfer/python/lib/python3.8/site-packages/caffe2 +freesurfer/python/lib/python3.8/site-packages/sklearn +freesurfer/python/lib/python3.8/site-packages/tensorflow +freesurfer/python/lib/python3.8/site-packages/torch +freesurfer/python/lib/python3.8/site-packages/**/tests +freesurfer/python/**/__pycache__ +freesurfer/python/share +freesurfer/SegmentNoLUT.txt +freesurfer/sessions +freesurfer/SetUpFreeSurfer.csh +freesurfer/SetUpFreeSurfer.sh +freesurfer/Simple_surface_labels2009.txt +freesurfer/sources.sh +freesurfer/subjects/bert +freesurfer/subjects/cvs_avg35 +freesurfer/subjects/cvs_avg35_inMNI152 +freesurfer/subjects/fsaverage3 +freesurfer/subjects/fsaverage4 +freesurfer/subjects/fsaverage_sym +freesurfer/subjects/lh.EC_average +freesurfer/subjects/README +freesurfer/subjects/rh.EC_average 
+freesurfer/subjects/sample-001.mgz +freesurfer/subjects/sample-002.mgz +freesurfer/subjects/V1_average +freesurfer/tkmeditParcColorsCMA +freesurfer/tktools +freesurfer/trctrain diff --git a/docker/files/fs-cert.pem b/docker/files/fs-cert.pem new file mode 100644 index 00000000..34d53c7b --- /dev/null +++ b/docker/files/fs-cert.pem @@ -0,0 +1,44 @@ +-----BEGIN CERTIFICATE----- +MIIHuDCCBiCgAwIBAgIRAMa1FS9MSn5TXKMgD8OXtoswDQYJKoZIhvcNAQEMBQAw +RDELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUludGVybmV0MjEhMB8GA1UEAxMYSW5D +b21tb24gUlNBIFNlcnZlciBDQSAyMB4XDTI0MDUwOTAwMDAwMFoXDTI1MDUwOTIz +NTk1OVowfTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDU1hc3NhY2h1c2V0dHMxMTAv +BgNVBAoTKFByZXNpZGVudCBhbmQgRmVsbG93cyBvZiBIYXJ2YXJkIENvbGxlZ2Ux +IzAhBgNVBAMTGnN1cmZlci5ubXIubWdoLmhhcnZhcmQuZWR1MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxJfeMKn0NjFkmduegvgRICrm+hn4TaZITeVl +uM/af+g05RUtKRKNIR0CC9mzPDYiW10VNj7TuDyS1DNoe/Jr1Or8lrMCm81NHjnY +aKvtC61O9GWvvDfWeb35vkHfkbd60AgBaLGZIEglENl122bBqpSdO8JglVTDgvFd +mWkuBnQzE/qKt7j88Xjafjhzbnv9Uf1hh8NtbiOaAf53/b5FZuUary64k5twPwpm +Y/pWw3CQhIWUhvRMwcQNvG24lDOssOXSEgb9Gd96ikU/yE4MDnuDBb6tf+2crVQ5 +PF4V2YvbQZ2x8Kf8hygWk3C555ZSCR3LgRl/Paxp9DZUrxfjtwIDAQABo4ID6jCC +A+YwHwYDVR0jBBgwFoAU70wAkqb7di5eleLJX4cbGdVN4tkwHQYDVR0OBBYEFG0I +bSHaYbRPsftHU7uJ5A7Z9UBfMA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8EAjAA +MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBJBgNVHSAEQjBAMDQGCysG +AQQBsjEBAgJnMCUwIwYIKwYBBQUHAgEWF2h0dHBzOi8vc2VjdGlnby5jb20vQ1BT +MAgGBmeBDAECAjBABgNVHR8EOTA3MDWgM6Axhi9odHRwOi8vY3JsLnNlY3RpZ28u +Y29tL0luQ29tbW9uUlNBU2VydmVyQ0EyLmNybDBwBggrBgEFBQcBAQRkMGIwOwYI +KwYBBQUHMAKGL2h0dHA6Ly9jcnQuc2VjdGlnby5jb20vSW5Db21tb25SU0FTZXJ2 +ZXJDQTIuY3J0MCMGCCsGAQUFBzABhhdodHRwOi8vb2NzcC5zZWN0aWdvLmNvbTCC +AX8GCisGAQQB1nkCBAIEggFvBIIBawFpAHUAzxFW7tUufK/zh1vZaS6b6RpxZ0qw +F+ysAdJbd87MOwgAAAGPXXtaqAAABAMARjBEAiARbv8hz3utGkTar2Y3jNnWOGKG +aajAYuB3f30g5Bnb+AIgAmwaltfGcp2uNYmTMU2eSC5AVhpnbisDS2KcFyC7ok4A +dwCi4wrkRe+9rZt+OO1HZ3dT14JbhJTXK14bLMS5UKRH5wAAAY9de1pQAAAEAwBI 
+MEYCIQDrM8C7Y6GdEKRmGQ1AUmbUArbpImpEXutI8E+KVOUsogIhAKbl+QYqJIUB +rRHpRkKZlefPyZQRo6JnRNz/J1KEuqsTAHcATnWjJ1yaEMM4W2zU3z9S6x3w4I4b +jWnAsfpksWKaOd8AAAGPXXtaRgAABAMASDBGAiEApB4qFWHZLGtPNkUK+6jFqsEk +vmy3bv3cuODXSG3CvfMCIQCCQPR/3HcrSGfmeJsFjWvwLbJFqe6GbRWCvjaUaldI +WDCB5AYDVR0RBIHcMIHZghpzdXJmZXIubm1yLm1naC5oYXJ2YXJkLmVkdYIUZm9y +dW0uZnJlZXN1cmZlci5uZXSCGWZvcnVtLm5tci5tZ2guaGFydmFyZC5lZHWCDmZy +ZWVzdXJmZXIubmV0ghdmdHAubm1yLm1naC5oYXJ2YXJkLmVkdYIZcnN5bmMubm1y +Lm1naC5oYXJ2YXJkLmVkdYIWc3VwcG9ydC5mcmVlc3VyZmVyLm5ldIIad2ViZGV2 +Lm5tci5tZ2guaGFydmFyZC5lZHWCEnd3dy5mcmVlc3VyZmVyLm5ldDANBgkqhkiG +9w0BAQwFAAOCAYEAB5nGih504XqNbZJjz1mK43IAiFs0YjocdDWOqqbXMirpbpNd +cPleB0iJkXJnzfzkZFyLdvFGos9FuPRDGmXSh0sdWpm2uQdkXlOp+/e4vMDg8Nrl +YkjshuU4fmUswnzsQ1aj/ome1DG3rmp3umjKpV6ewnVLhgjQ5zwCHfLLsNyzowqn +I6qAa2uzk7bS3XuYu4bLhVfD3X0Ybe4V3FKODBZRAIIU8hgtCz6zw5WtxzdEm5Qp +FHdN8OKazXvrJbzHB1WVk7buIn+8n2HoO202wGaFyyQFVqM2ug0FgCW8AaB+XRCq +BV+nZND2AIALG1HcIIL+pZwxS1K/jBkjUJRb3GDVWw7yzxuvlmawLhk8xzrgTsvp +QXaR+CbnTBx1PeB4nf+yHg2VBvKk6m7E9lnyymENmbeaLC67CJQgr+ne1rpOuEIs +fVbKUP437fyEvPuZCZ+3gxFsKgOerk2J95+AdLKB01pQFh/ITS/2zHHeMeD118sR +mUO+RXiPX5ZUqu/M +-----END CERTIFICATE----- diff --git a/docker/files/neurodebian.gpg b/docker/files/neurodebian.gpg deleted file mode 100644 index c546d45d..00000000 --- a/docker/files/neurodebian.gpg +++ /dev/null @@ -1,71 +0,0 @@ ------BEGIN PGP PUBLIC KEY BLOCK----- -Version: GnuPG v1 - -mQGiBEQ7TOgRBADvaRsIZ3VZ6Qy7PlDpdMm97m0OfvouOj/HhjOM4M3ECbGn4cYh -vN1gK586s3sUsUcNQ8LuWvNsYhxYsVTZymCReJMEDxod0U6/z/oIbpWv5svF3kpl -ogA66Ju/6cZx62RiCSOkskI6A3Waj6xHyEo8AGOPfzbMoOOQ1TS1u9s2FwCgxziL -wADvKYlDZnWM03QtqIJVD8UEAOks9Q2OqFoqKarj6xTRdOYIBVEp2jhozZUZmLmz -pKL9E4NKGfixqxdVimFcRUGM5h7R2w7ORqXjCzpiPmgdv3jJLWDnmHLmMYRYQc8p -5nqo8mxuO3zJugxBemWoacBDd1MJaH7nK20Hsk9L/jvU/qLxPJotMStTnwO+EpsK -HlihA/9ZpvzR1QWNUd9nSuNR3byJhaXvxqQltsM7tLqAT4qAOJIcMjxr+qESdEbx -NHM5M1Y21ZynrsQw+Fb1WHXNbP79vzOxHoZR0+OXe8uUpkri2d9iOocre3NUdpOO 
-JHtl6cGGTFILt8tSuOVxMT/+nlo038JQB2jARe4B85O0tkPIPbQybmV1cm8uZGVi -aWFuLm5ldCBhcmNoaXZlIDxtaWNoYWVsLmhhbmtlQGdtYWlsLmNvbT6IRgQQEQgA -BgUCTVHJKwAKCRCNEUVjdcAkyOvzAJ0abJz+f2a6VZG1c9T8NHMTYh1atwCgt0EE -3ZZd/2in64jSzu0miqhXbOKISgQQEQIACgUCSotRlwMFAXgACgkQ93+NsjFEvg8n -JgCfWcdJbILBtpLZCocvOzlLPqJ0Fn0AoI4EpJRxoUnrtzBGUC1MqecU7WsDiGAE -ExECACAFAkqLUWcCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRCl0y8BJkml -qVklAJ4h2V6MdQkSAThF5c2Gkq6eSoIQYQCeM0DWyB9Bl+tTPSTYXwwZi2uoif20 -QmFwc3kuZ3NlLnVuaS1tYWdkZWJ1cmcuZGUgRGViaWFuIEFyY2hpdmUgPG1pY2hh -ZWwuaGFua2VAZ21haWwuY29tPohGBBARAgAGBQJEO03FAAoJEPd/jbIxRL4PU18A -n3tn7i4qdlMi8kHbYWFoabsKc9beAJ9sl/leZNCYNMGhz+u6BQgyeLKw94heBBMR -AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA -n27DvtZizNEbhz3wRUPQMiQjtqdvAJ9rS9YdPe5h5o5gHx3mw3BSkOttdYheBBMR -AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA -oLhwWL+E+2I9lrUf4Lf26quOK9vLAKC9ZpIF2tUirFFkBWnQvu13/TA0SokCHAQQ -AQIABgUCTSNBgQAKCRDAc9Iof/uem4NpEACQ8jxmaCaS/qk/Y4GiwLA5bvKosG3B -iARZ2v5UWqCZQ1tS56yKse/lCIzXQqU9BnYW6wOI2rvFf9meLfd8h96peG6oKscs -fbclLDIf68bBvGBQaD0VYFi/Fk/rxmTQBOCQ3AJZs8O5rIM4gPGE0QGvSZ1h7VRw -3Uyeg4jKXLIeJn2xEmOJgt3auAR2FyKbzHaX9JCoByJZ/eU23akNl9hgt7ePlpXo -74KNYC58auuMUhCq3BQDB+II4ERYMcmFp1N5ZG05Cl6jcaRRHDXz+Ax6DWprRI1+ -RH/Yyae6LmKpeJNwd+vM14aawnNO9h8IAQ+aJ3oYZdRhGyybbin3giJ10hmWveg/ -Pey91Nh9vBCHdDkdPU0s9zE7z/PHT0c5ccZRukxfZfkrlWQ5iqu3V064ku5f4PBy -8UPSkETcjYgDnrdnwqIAO+oVg/SFlfsOzftnwUrvwIcZlXAgtP6MEEAs/38e/JIN -g4VrpdAy7HMGEUsh6Ah6lvGQr+zBnG44XwKfl7e0uCYkrAzUJRGM5vx9iXvFMcMu -jv9EBNNBOU8/Y6MBDzGZhgaoeI27nrUvaveJXjAiDKAQWBLjtQjINZ8I9uaSGOul -8kpbFavE4eS3+KhISrSHe4DuAa3dk9zI+FiPvXY1ZyfQBtNpR+gYFY6VxMbHhY1U -lSLHO2eUIQLdYbRITmV1cm9EZWJpYW4gQXJjaGl2ZSBLZXkgPHBrZy1leHBwc3kt -bWFpbnRhaW5lcnNAbGlzdHMuYWxpb3RoLmRlYmlhbi5vcmc+iEYEEBEIAAYFAk1R -yQYACgkQjRFFY3XAJMgEWwCggx4Gqlcrt76TSMlbU94cESo55AEAoJ3asQEMpe8t -QUX+5aikw3z1AUoCiEoEEBECAAoFAkqf/3cDBQF4AAoJEPd/jbIxRL4PxyMAoKUI -RPWlHCj/+HSFfwhos68wcSwmAKChuC00qutDro+AOo+uuq6YoHXj+ohgBBMRAgAg 
-BQJKn/8bAhsDBgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQpdMvASZJpalDggCe -KF9KOgOPdQbFnKXl8KtHory4EEwAnA7jxgorE6kk2QHEXFSF8LzOOH4GiGMEExEC -ACMCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCSp//RgIZAQAKCRCl0y8BJkml -qekFAKCRyt4+FoCzmBbRUUP3Cr8PzH++IgCgkno4vdjsWdyAey8e0KpITTXMFrmJ -AhwEEAECAAYFAk0jQYEACgkQwHPSKH/7npsFfw/+P8B8hpM3+T1fgboBa4R32deu -n8m6b8vZMXwuo/awQtMpzjem8JGXSUQm8iiX4hDtjq6ZoPrlN8T4jNmviBt/F5jI -Jji/PYmhq+Zn9s++mfx+aF4IJrcHJWFkg/6kJzn4oSdl/YlvKf4VRCcQNtj4xV87 -GsdamnzU17XapLVMbSaVKh+6Af7ZLDerEH+iAq733HsYaTK+1xKmN7EFVXgS7bZ1 -9C4LTzc97bVHSywpT9yIrg9QQs/1kshfVIHDKyhjF6IwzSVbeGAIL3Oqo5zOMkWv -7JlEIkkhTyl+FETxNMTMYjAk+Uei3kRodneq3YBF2uFYSEzrXQgHAyn37geiaMYj -h8wu6a85nG1NS0SdxiZDIePmbvD9vWxFZUWYJ/h9ifsLivWcVXlvHoQ0emd+n2ai -FhAck2xsuyHgnGIZMHww5IkQdu/TMqvbcR6d8Xulh+C4Tq7ppy+oTLADSBKII++p -JQioYydRD529EUJgVlhyH27X6YAk3FuRD3zYZRYS2QECiKXvS665o3JRJ0ZSqNgv -YOom8M0zz6bI9grnUoivMI4o7ISpE4ZwffEd37HVzmraaUHDXRhkulFSf1ImtXoj -V9nNSM5p/+9eP7OioTZhSote6Vj6Ja1SZeRkXZK7BwqPbdO0VsYOb7G//ZiOlqs+ -paRr92G/pwBfj5Dq8EK5Ag0ERDtM9RAIAN0EJqBPvLN0tEin/y4Fe0R4n+E+zNXg -bBsq4WidwyUFy3h/6u86FYvegXwUqVS2OsEs5MwPcCVJOfaEthF7I89QJnP9Nfx7 -V5I9yFB53o9ii38BN7X+9gSjpfwXOvf/wIDfggxX8/wRFel37GRB7TiiABRArBez -s5x+zTXvT++WPhElySj0uY8bjVR6tso+d65K0UesvAa7PPWeRS+3nhqABSFLuTTT -MMbnVXCGesBrYHlFVXClAYrSIOX8Ub/UnuEYs9+hIV7U4jKzRF9WJhIC1cXHPmOh -vleAf/I9h/0KahD7HLYud40pNBo5tW8jSfp2/Q8TIE0xxshd51/xy4MAAwUH+wWn -zsYVk981OKUEXul8JPyPxbw05fOd6gF4MJ3YodO+6dfoyIl3bewk+11KXZQALKaO -1xmkAEO1RqizPeetoadBVkQBp5xPudsVElUTOX0pTYhkUd3iBilsCYKK1/KQ9KzD -I+O/lRsm6L9lc6rV0IgPU00P4BAwR+x8Rw7TJFbuS0miR3lP1NSguz+/kpjxzmGP -LyHJ+LVDYFkk6t0jPXhqFdUY6McUTBDEvavTGlVO062l9APTmmSMVFDsPN/rBes2 -rYhuuT+lDp+gcaS1UoaYCIm9kKOteQBnowX9V74Z+HKEYLtwILaSnNe6/fNSTvyj -g0z+R+sPCY4nHewbVC+ISQQYEQIACQUCRDtM9QIbDAAKCRCl0y8BJkmlqbecAJ9B -UdSKVg9H+fQNyP5sbOjj4RDtdACfXHrRHa2+XjJP0dhpvJ8IfvYnQsU= -=fAJZ ------END PGP PUBLIC KEY BLOCK----- diff --git a/docs/_api/surface.rst b/docs/_api/surface.rst new file mode 100644 index 00000000..92ba28ab --- 
/dev/null +++ b/docs/_api/surface.rst @@ -0,0 +1,6 @@ +================== +Surface Transforms +================== + +.. automodule:: nitransforms.surface + :members: diff --git a/docs/api.rst b/docs/api.rst index eb3c566b..a57d6836 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -10,5 +10,6 @@ Information on specific functions, classes, and methods for developers. _api/linear _api/manip _api/nonlinear + _api/surface _api/interp _api/patched diff --git a/docs/conf.py b/docs/conf.py index 18e532a1..025d649b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -76,7 +76,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. diff --git a/docs/index.rst b/docs/index.rst index 4edf3198..a1bb94f8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,7 +14,7 @@ A development repo for `nipy/nibabel#656 \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n", + "image/svg+xml": [ + "\n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -231,7 +1920,7 @@ } ], "source": [ - "moved_to_t1 = t1w_to_bold_xfm.apply(bold_nii)\n", + "moved_to_t1 = apply(t1w_to_bold_xfm, bold_nii)\n", "print(moved_to_t1.affine)\n", "print(moved_to_t1.shape)" ] @@ -243,7 +1932,1695 @@ "outputs": [ { "data": { - "image/svg+xml": "\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n", + "image/svg+xml": [ + "\n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -280,7 +3657,1683 @@ "outputs": [ { "data": { - "image/svg+xml": "\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n", + "image/svg+xml": [ + "\n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -290,7 +5343,7 @@ } ], "source": [ - "display(bold_nii, bold_to_t1w_xfm.apply(t1w_nii))" + "display(bold_nii, apply(bold_to_t1w_xfm, t1w_nii))" ] }, { @@ -315,7 +5368,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -329,9 +5382,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + 
"version": "3.11.8" } }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/docs/requirements.txt b/docs/requirements.txt index 8a932a22..d74f8faf 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -4,5 +4,5 @@ packaging pydot>=1.2.3 pydotplus sphinx-argparse -sphinx ~= 4.0 -sphinx_rtd_theme \ No newline at end of file +sphinx~=7.0 +sphinx_rtd_theme diff --git a/env.yml b/env.yml new file mode 100644 index 00000000..d550959b --- /dev/null +++ b/env.yml @@ -0,0 +1,44 @@ +name: nitransforms +channels: + - https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/ + - conda-forge +# Update this ~yearly; last updated Jan 2024 +dependencies: + - python=3.11 + # Needed for svgo and bids-validator; consider moving to deno + - nodejs=20 + # Intel Math Kernel Library for numpy + - mkl=2023.2.0 + - mkl-service=2.4.0 + # git-annex for templateflow users with DataLad superdatasets + - git-annex=*=alldep* + # ANTs 2.5.3 is linked against libitk 5.4 - let's pin both there + - libitk=5.4 + # Base scientific python stack; required by FSL, so pinned here + - numpy=1.26 + - scipy=1.11 + - matplotlib=3.8 + - pandas=2.2 + - h5py=3.10 + # Dependencies compiled against numpy, best to stick with conda + - nitime=0.10 + - scikit-image=0.22 + - scikit-learn=1.4 + # Utilities + - graphviz=9.0 + - pandoc=3.1 + # Workflow dependencies: ANTs + - ants=2.5.3 + # Workflow dependencies: FSL (versions pinned in 6.0.7.7) + - fsl-bet2=2111.4 + - fsl-flirt=2111.2 + - fsl-fast4=2111.3 + - fsl-fugue=2201.4 + - fsl-mcflirt=2111.0 + - fsl-miscmaths=2203.2 + - fsl-topup=2203.2 + # - pip + # - pip: + # - -r requirements.txt +variables: + FSLOUTPUTTYPE: NIFTI_GZ diff --git a/nitransforms/__init__.py b/nitransforms/__init__.py index 1f819933..4ded59a0 100644 --- a/nitransforms/__init__.py +++ b/nitransforms/__init__.py @@ -16,27 +16,23 @@ transform """ -from . import linear, manip, nonlinear +from . 
import linear, manip, nonlinear, surface from .linear import Affine, LinearTransformsMapping from .nonlinear import DenseFieldTransform from .manip import TransformChain +from .resampling import apply try: from ._version import __version__ except ModuleNotFoundError: - from pkg_resources import get_distribution, DistributionNotFound - - try: - __version__ = get_distribution("nitransforms").version - except DistributionNotFound: - __version__ = "unknown" - del get_distribution - del DistributionNotFound + __version__ = "0+unknown" __packagename__ = "nitransforms" __copyright__ = "Copyright (c) 2021 The NiPy developers" __all__ = [ + "apply", + "surface", "linear", "manip", "nonlinear", diff --git a/nitransforms/base.py b/nitransforms/base.py index 96f00edb..fa05f1f6 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Common interface for transforms.""" + from pathlib import Path import numpy as np import h5py @@ -15,7 +16,7 @@ from nibabel import funcs as _nbfuncs from nibabel.nifti1 import intent_codes as INTENT_CODES from nibabel.cifti2 import Cifti2Image -from scipy import ndimage as ndi +import nibabel as nb EQUALITY_TOL = 1e-5 @@ -89,6 +90,76 @@ def shape(self): return self._shape +class SurfaceMesh(SampledSpatialData): + """Class to represent surface meshes.""" + + __slots__ = ["_triangles"] + + def __init__(self, dataset): + """Create a sampling reference.""" + self._shape = None + + if isinstance(dataset, SurfaceMesh): + self._coords = dataset._coords + self._triangles = dataset._triangles + self._ndim = dataset._ndim + self._npoints = dataset._npoints + self._shape = dataset._shape + return + + if isinstance(dataset, (str, Path)): + dataset = _nbload(str(dataset)) + + if hasattr(dataset, "numDA"): # Looks like a Gifti file + _das = dataset.get_arrays_from_intent(INTENT_CODES["pointset"]) + if not _das: + raise TypeError( + "Input Gifti file does not 
contain reference coordinates." + ) + self._coords = np.vstack([da.data for da in _das]) + _tris = dataset.get_arrays_from_intent(INTENT_CODES["triangle"]) + self._triangles = np.vstack([da.data for da in _tris]) + self._npoints, self._ndim = self._coords.shape + self._shape = self._coords.shape + return + + if isinstance(dataset, Cifti2Image): + raise NotImplementedError + + raise ValueError("Dataset could not be interpreted as an irregular sample.") + + def check_sphere(self, tolerance=1.001): + """Check sphericity of surface. + Based on https://github.com/Washington-University/workbench/blob/\ +7ba3345d161d567a4b628ceb02ab4471fc96cb20/src/Files/SurfaceResamplingHelper.cxx#L503 + """ + dists = np.linalg.norm(self._coords, axis=1) + return (dists.min() * tolerance) > dists.max() + + def set_radius(self, radius=100): + if not self.check_sphere(): + raise ValueError("You should only set the radius on spherical surfaces.") + dists = np.linalg.norm(self._coords, axis=1) + self._coords = self._coords * (radius / dists).reshape((-1, 1)) + + @classmethod + def from_arrays(cls, coordinates, triangles): + darrays = [ + nb.gifti.GiftiDataArray( + coordinates.astype(np.float32), + intent=nb.nifti1.intent_codes["NIFTI_INTENT_POINTSET"], + datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_FLOAT32"], + ), + nb.gifti.GiftiDataArray( + triangles.astype(np.int32), + intent=nb.nifti1.intent_codes["NIFTI_INTENT_TRIANGLE"], + datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_INT32"], + ), + ] + gii = nb.gifti.GiftiImage(darrays=darrays) + return cls(gii) + + class ImageGrid(SampledSpatialData): """Class to represent spaces of gridded data (images).""" @@ -178,7 +249,10 @@ def __ne__(self, other): class TransformBase: """Abstract image class to represent transforms.""" - __slots__ = ("_reference", "_ndim",) + __slots__ = ( + "_reference", + "_ndim", + ) def __init__(self, reference=None): """Instantiate a transform.""" @@ -206,6 +280,22 @@ def __add__(self, b): return 
TransformChain(transforms=[self, b]) + def __len__(self): + """ + Enable ``len()``. + + By default, all transforms are of length one. + This must be overriden by transforms arrays and chains. + + Example + ------- + >>> T1 = TransformBase() + >>> len(T1) + 1 + + """ + return 1 + @property def reference(self): """Access a reference space where data will be resampled onto.""" @@ -222,101 +312,6 @@ def ndim(self): """Access the dimensions of the reference space.""" raise TypeError("TransformBase has no dimensions") - def apply( - self, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """ - Apply a transformation to an image, resampling on the reference spatial object. - - Parameters - ---------- - spatialimage : `spatialimage` - The image object containing the data to be resampled in reference - space - reference : spatial object, optional - The image, surface, or combination thereof containing the coordinates - of samples that will be sampled. - order : int, optional - The order of the spline interpolation, default is 3. - The order has to be in the range 0-5. - mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional - Determines how the input image is extended when the resamplings overflows - a border. Default is 'constant'. - cval : float, optional - Constant value for ``mode='constant'``. Default is 0.0. - prefilter: bool, optional - Determines if the image's data array is prefiltered with - a spline filter before interpolation. The default is ``True``, - which will create a temporary *float64* array of filtered values - if *order > 1*. If setting this to ``False``, the output will be - slightly blurred if *order > 1*, unless the input is prefiltered, - i.e. it is the result of calling the spline filter on the original - input. - output_dtype: dtype specifier, optional - The dtype of the returned array or image, if specified. 
- If ``None``, the default behavior is to use the effective dtype of - the input image. If slope and/or intercept are defined, the effective - dtype is float64, otherwise it is equivalent to the input image's - ``get_data_dtype()`` (on-disk type). - If ``reference`` is defined, then the return value is an image, with - a data array of the effective dtype but with the on-disk dtype set to - the input image's on-disk dtype. - - Returns - ------- - resampled : `spatialimage` or ndarray - The data imaged after resampling to reference space. - - """ - if reference is not None and isinstance(reference, (str, Path)): - reference = _nbload(str(reference)) - - _ref = ( - self.reference if reference is None else SpatialReference.factory(reference) - ) - - if _ref is None: - raise TransformError("Cannot apply transform without reference") - - if isinstance(spatialimage, (str, Path)): - spatialimage = _nbload(str(spatialimage)) - - data = np.asanyarray(spatialimage.dataobj) - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(self.map(_ref.ndcoords.T), dim=_ref.ndim) - ) - - resampled = ndi.map_coordinates( - data, - targets.T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - - if isinstance(_ref, ImageGrid): # If reference is grid, reshape - hdr = None - if _ref.header is not None: - hdr = _ref.header.copy() - hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) - moved = spatialimage.__class__( - resampled.reshape(_ref.shape), - _ref.affine, - hdr, - ) - return moved - - return resampled - def map(self, x, inverse=False): r""" Apply :math:`y = f(x)`. @@ -352,6 +347,17 @@ def _to_hdf5(self, x5_root): """Serialize this object into the x5 file format.""" raise NotImplementedError + def apply(self, *args, **kwargs): + """Apply the transform to a dataset. + + Deprecated. Please use ``nitransforms.resampling.apply`` instead. + """ + _msg = "This method is deprecated. 
Please use `nitransforms.resampling.apply` instead." + warnings.warn(_msg, DeprecationWarning, stacklevel=2) + from .resampling import apply + + return apply(self, *args, **kwargs) + def _as_homogeneous(xyz, dtype="float32", dim=3): """ @@ -382,4 +388,8 @@ def _as_homogeneous(xyz, dtype="float32", dim=3): def _apply_affine(x, affine, dim): """Get the image array's indexes corresponding to coordinates.""" - return affine.dot(_as_homogeneous(x, dim=dim).T)[:dim, ...].T + return np.tensordot( + affine, + _as_homogeneous(x, dim=dim).T, + axes=1, + )[:dim, ...] diff --git a/nitransforms/cli.py b/nitransforms/cli.py index 63b8bed4..8f8f5ce0 100644 --- a/nitransforms/cli.py +++ b/nitransforms/cli.py @@ -5,6 +5,7 @@ from .linear import load as linload from .nonlinear import load as nlinload +from .resampling import apply def cli_apply(pargs): @@ -38,7 +39,8 @@ def cli_apply(pargs): # ensure a reference is set xfm.reference = pargs.ref or pargs.moving - moved = xfm.apply( + moved = apply( + xfm, pargs.moving, order=pargs.order, mode=pargs.mode, diff --git a/nitransforms/conftest.py b/nitransforms/conftest.py index 854cac43..70680882 100644 --- a/nitransforms/conftest.py +++ b/nitransforms/conftest.py @@ -6,8 +6,6 @@ import pytest import tempfile -_data = None -_brainmask = None _testdir = Path(os.getenv("TEST_DATA_HOME", "~/.nitransforms/testdata")).expanduser() _datadir = Path(__file__).parent / "tests" / "data" @@ -48,10 +46,6 @@ def testdata_path(): @pytest.fixture def get_testdata(): """Generate data in the requested orientation.""" - global _data - - if _data is not None: - return _data return _reorient(_testdir / "someones_anatomy.nii.gz") @@ -59,11 +53,6 @@ def get_testdata(): @pytest.fixture def get_testmask(): """Generate data in the requested orientation.""" - global _brainmask - - if _brainmask is not None: - return _brainmask - return _reorient(_testdir / "someones_anatomy_brainmask.nii.gz") diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index 
06eaf432..7c66d434 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -237,7 +237,7 @@ def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG): True """ - return (obliquity(affine).max() * 180 / pi) > thres + return float(obliquity(affine).max() * 180 / pi) > thres def _afni_deobliqued_grid(oblique, shape): diff --git a/nitransforms/io/base.py b/nitransforms/io/base.py index d86c8539..3c923426 100644 --- a/nitransforms/io/base.py +++ b/nitransforms/io/base.py @@ -76,12 +76,12 @@ class LinearParameters(LinearTransformStruct): Examples -------- >>> lp = LinearParameters() - >>> np.all(lp.structarr['parameters'] == np.eye(4)) + >>> np.array_equal(lp.structarr['parameters'], np.eye(4)) True >>> p = np.diag([2., 2., 2., 1.]) >>> lp = LinearParameters(p) - >>> np.all(lp.structarr['parameters'] == p) + >>> np.array_equal(lp.structarr['parameters'], p) True """ diff --git a/nitransforms/io/itk.py b/nitransforms/io/itk.py index ddeb78e6..afabfd98 100644 --- a/nitransforms/io/itk.py +++ b/nitransforms/io/itk.py @@ -403,14 +403,19 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False): if xfm["TransformType"][0].startswith(b"DisplacementFieldTransform"): if only_linear: continue - _fixed = np.asanyarray(xfm[f"{typo_fallback}FixedParameters"]) - shape = _fixed[:3].astype("uint16").tolist() - offset = _fixed[3:6].astype("float") - zooms = _fixed[6:9].astype("float") - directions = _fixed[9:].astype("float").reshape((3, 3)) + _fixed = xfm[f"{typo_fallback}FixedParameters"] + shape = _fixed[:3] + offset = _fixed[3:6] + zooms = _fixed[6:9] + directions = np.reshape(_fixed[9:], (3, 3)) affine = from_matvec(directions * zooms, offset) - field = np.asanyarray(xfm[f"{typo_fallback}Parameters"]).reshape( - (*shape, 1, -1) + # ITK uses Fortran ordering, like NIfTI, but with the vector dimension first + field = np.moveaxis( + np.reshape( + xfm[f"{typo_fallback}Parameters"], (3, *shape.astype(int)), order='F' + ), + 0, + -1, ) field[..., (0, 1)] *= -1.0 hdr = 
Nifti1Header() @@ -418,7 +423,7 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False): hdr.set_data_dtype("float") xfm_list.append( - Nifti1Image(field.astype("float"), LPS @ affine @ LPS, hdr) + Nifti1Image(field.astype("float"), LPS @ affine, hdr) ) continue diff --git a/nitransforms/linear.py b/nitransforms/linear.py index af14f396..71df6a16 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -10,16 +10,12 @@ import warnings import numpy as np from pathlib import Path -from scipy import ndimage as ndi -from nibabel.loadsave import load as _nbload from nibabel.affines import from_matvec -from nibabel.arrayproxy import get_obj_dtype from nitransforms.base import ( ImageGrid, TransformBase, - SpatialReference, _as_homogeneous, EQUALITY_TOL, ) @@ -113,6 +109,10 @@ def __invert__(self): """ return self.__class__(self._inverse) + def __len__(self): + """Enable using len().""" + return 1 if self._matrix.ndim == 2 else len(self._matrix) + def __matmul__(self, b): """ Compose two Affines. @@ -330,10 +330,6 @@ def __getitem__(self, i): """Enable indexed access to the series of matrices.""" return Affine(self.matrix[i, ...], reference=self._reference) - def __len__(self): - """Enable using len().""" - return len(self._matrix) - def map(self, x, inverse=False): r""" Apply :math:`y = f(x)`. @@ -402,119 +398,6 @@ def to_filename(self, filename, fmt="X5", moving=None): ).to_filename(filename) return filename - def apply( - self, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """ - Apply a transformation to an image, resampling on the reference spatial object. - - Parameters - ---------- - spatialimage : `spatialimage` - The image object containing the data to be resampled in reference - space - reference : spatial object, optional - The image, surface, or combination thereof containing the coordinates - of samples that will be sampled. 
- order : int, optional - The order of the spline interpolation, default is 3. - The order has to be in the range 0-5. - mode : {"constant", "reflect", "nearest", "mirror", "wrap"}, optional - Determines how the input image is extended when the resamplings overflows - a border. Default is "constant". - cval : float, optional - Constant value for ``mode="constant"``. Default is 0.0. - prefilter: bool, optional - Determines if the image's data array is prefiltered with - a spline filter before interpolation. The default is ``True``, - which will create a temporary *float64* array of filtered values - if *order > 1*. If setting this to ``False``, the output will be - slightly blurred if *order > 1*, unless the input is prefiltered, - i.e. it is the result of calling the spline filter on the original - input. - - Returns - ------- - resampled : `spatialimage` or ndarray - The data imaged after resampling to reference space. - - """ - - if reference is not None and isinstance(reference, (str, Path)): - reference = _nbload(str(reference)) - - _ref = ( - self.reference if reference is None else SpatialReference.factory(reference) - ) - - if isinstance(spatialimage, (str, Path)): - spatialimage = _nbload(str(spatialimage)) - - # Avoid opening the data array just yet - input_dtype = get_obj_dtype(spatialimage.dataobj) - output_dtype = output_dtype or input_dtype - - # Prepare physical coordinates of input (grid, points) - xcoords = _ref.ndcoords.astype("f4").T - - # Invert target's (moving) affine once - ras2vox = ~Affine(spatialimage.affine) - - if spatialimage.ndim == 4 and (len(self) != spatialimage.shape[-1]): - raise ValueError( - "Attempting to apply %d transforms on a file with " - "%d timepoints" % (len(self), spatialimage.shape[-1]) - ) - - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (xcoords.shape[0], len(self)), dtype=output_dtype, order="F" - ) - - dataobj = ( - 
np.asanyarray(spatialimage.dataobj, dtype=input_dtype) - if spatialimage.ndim in (2, 3) - else None - ) - - for t, xfm_t in enumerate(self): - # Map the input coordinates on to timepoint t of the target (moving) - ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] - - # Calculate corresponding voxel coordinates - yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] - - # Interpolate - resampled[..., t] = ndi.map_coordinates( - ( - dataobj - if dataobj is not None - else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) - ), - yvoxels.T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - - if isinstance(_ref, ImageGrid): # If reference is grid, reshape - newdata = resampled.reshape(_ref.shape + (len(self),)) - moved = spatialimage.__class__(newdata, _ref.affine, spatialimage.header) - moved.header.set_data_dtype(output_dtype) - return moved - - return resampled - def load(filename, fmt=None, reference=None, moving=None): """ diff --git a/nitransforms/manip.py b/nitransforms/manip.py index 233f5adf..9389197d 100644 --- a/nitransforms/manip.py +++ b/nitransforms/manip.py @@ -67,6 +67,11 @@ def __len__(self): """Enable using len().""" return len(self.transforms) + @property + def ndim(self): + """Get the number of dimensions.""" + return max(x.ndim for x in self._transforms) + @property def transforms(self): """Get the internal list of transforms.""" @@ -208,6 +213,8 @@ def _as_chain(x): """Convert a value into a transform chain.""" if isinstance(x, TransformChain): return x.transforms + if isinstance(x, TransformBase): + return [x] if isinstance(x, Iterable): return list(x) return [x] diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 69c19d35..9c29c53c 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -18,7 +18,6 @@ TransformBase, TransformError, ImageGrid, - SpatialReference, _as_homogeneous, ) from scipy.ndimage import map_coordinates @@ -71,21 +70,18 @@ def __init__(self, 
field=None, is_deltas=True, reference=None): is_deltas = True try: - self.reference = ImageGrid( - reference if reference is not None else field - ) + self.reference = ImageGrid(reference if reference is not None else field) except AttributeError: raise TransformError( "Field must be a spatial image if reference is not provided" - if reference is None else - "Reference is not a spatial image" + if reference is None + else "Reference is not a spatial image" ) - ndim = self._field.ndim - 1 - if self._field.shape[-1] != ndim: + if self._field.shape[-1] != self.ndim: raise TransformError( "The number of components of the field (%d) does not match " - "the number of dimensions (%d)" % (self._field.shape[-1], ndim) + "the number of dimensions (%d)" % (self._field.shape[-1], self.ndim) ) if is_deltas: @@ -163,26 +159,30 @@ def map(self, x, inverse=False): if inverse is True: raise NotImplementedError + ijk = self.reference.index(x) indexes = np.round(ijk).astype("int") if np.all(np.abs(ijk - indexes) < 1e-3): - indexes = tuple(tuple(i) for i in indexes.T) + indexes = tuple(tuple(i) for i in indexes) return self._field[indexes] - new_map = np.vstack(tuple( - map_coordinates( - self._field[..., i], - ijk.T, - order=3, - mode="constant", - cval=np.nan, - prefilter=True, - ) for i in range(self.reference.ndim) - )).T + new_map = np.vstack( + tuple( + map_coordinates( + self._field[..., i], + ijk, + order=3, + mode="constant", + cval=np.nan, + prefilter=True, + ) + for i in range(self.reference.ndim) + ) + ).T # Set NaN values back to the original coordinates value = no displacement - new_map[np.isnan(new_map)] = x[np.isnan(new_map)] + new_map[np.isnan(new_map)] = np.array(x)[np.isnan(new_map)] return new_map def __matmul__(self, b): @@ -205,9 +205,9 @@ def __matmul__(self, b): True """ - retval = b.map( - self._field.reshape((-1, self._field.shape[-1])) - ).reshape(self._field.shape) + retval = b.map(self._field.reshape((-1, self._field.shape[-1]))).reshape( + 
self._field.shape + ) return DenseFieldTransform(retval, is_deltas=False, reference=self.reference) def __eq__(self, other): @@ -246,7 +246,7 @@ def from_filename(cls, filename, fmt="X5"): class BSplineFieldTransform(TransformBase): """Represent a nonlinear transform parameterized by BSpline basis.""" - __slots__ = ['_coeffs', '_knots', '_weights', '_order', '_moving'] + __slots__ = ["_coeffs", "_knots", "_weights", "_order", "_moving"] def __init__(self, coefficients, reference=None, order=3): """Create a smooth deformation field using B-Spline basis.""" @@ -261,10 +261,11 @@ def __init__(self, coefficients, reference=None, order=3): if reference is not None: self.reference = reference - if coefficients.shape[-1] != self.ndim: + if coefficients.shape[-1] != self.reference.ndim: raise TransformError( - 'Number of components of the coefficients does ' - 'not match the number of dimensions') + "Number of components of the coefficients does " + "not match the number of dimensions" + ) @property def ndim(self): @@ -274,20 +275,17 @@ def ndim(self): def to_field(self, reference=None, dtype="float32"): """Generate a displacements deformation field from this B-Spline field.""" _ref = ( - self.reference if reference is None else - ImageGrid(_ensure_image(reference)) + self.reference if reference is None else ImageGrid(_ensure_image(reference)) ) if _ref is None: raise TransformError("A reference must be defined") - ndim = self._coeffs.shape[-1] - if self._weights is None: self._weights = grid_bspline_weights(_ref, self._knots) - field = np.zeros((_ref.npoints, ndim)) + field = np.zeros((_ref.npoints, self.ndim)) - for d in range(ndim): + for d in range(self.ndim): # 1 x Nvox : (1 x K) @ (K x Nvox) field[:, d] = self._coeffs[..., d].reshape(-1) @ self._weights @@ -295,47 +293,6 @@ def to_field(self, reference=None, dtype="float32"): field.astype(dtype).reshape(*_ref.shape, -1), reference=_ref ) - def apply( - self, - spatialimage, - reference=None, - order=3, - 
mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """Apply a B-Spline transform on input data.""" - - _ref = ( - self.reference if reference is None else - SpatialReference.factory(_ensure_image(reference)) - ) - spatialimage = _ensure_image(spatialimage) - - # If locations to be interpolated are not on a grid, run map() - if not isinstance(_ref, ImageGrid): - return super().apply( - spatialimage, - reference=_ref, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - output_dtype=output_dtype, - ) - - # If locations to be interpolated are on a grid, generate a displacements field - return self.to_field(reference=reference).apply( - spatialimage, - reference=reference, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - output_dtype=output_dtype, - ) - def map(self, x, inverse=False): r""" Apply the transformation to a list of physical coordinate points. @@ -360,11 +317,11 @@ def map(self, x, inverse=False): -------- >>> xfm = BSplineFieldTransform(test_dir / "someones_bspline_coefficients.nii.gz") >>> xfm.reference = test_dir / "someones_anatomy.nii.gz" - >>> xfm.map([-6.5, -36., -19.5]).tolist() - [[-6.5, -31.476097418406784, -19.5]] + >>> xfm.map([-6.5, -36., -19.5]).tolist() # doctest: +ELLIPSIS + [[-6.5, -31.476097418406..., -19.5]] - >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist() - [[-6.5, -31.476097418406784, -19.5], [-1.0, -3.8072675377121996, -11.25]] + >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist() # doctest: +ELLIPSIS + [[-6.5, -31.4760974184..., -19.5], [-1.0, -3.807267537712..., -11.25]] """ vfunc = partial( @@ -386,9 +343,9 @@ def _map_xyz(x, reference, knots, coeffs): # Probably this will change if the order of the B-Spline is different w_start, w_end = np.ceil(ijk - 2).astype(int), np.floor(ijk + 2).astype(int) # Generate a grid of indexes corresponding to the window - nonzero_knots = tuple([ - np.arange(start, end + 1) for start, end in zip(w_start, w_end) - ]) 
+ nonzero_knots = tuple( + [np.arange(start, end + 1) for start, end in zip(w_start, w_end)] + ) nonzero_knots = tuple(np.meshgrid(*nonzero_knots, indexing="ij")) window = np.array(nonzero_knots).reshape((ndim, -1)) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py new file mode 100644 index 00000000..390825c5 --- /dev/null +++ b/nitransforms/resampling.py @@ -0,0 +1,366 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the NiBabel package for the +# copyright and license terms. +# +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +"""Resampling utilities.""" + +import asyncio +from os import cpu_count +from functools import partial +from pathlib import Path +from typing import Callable, TypeVar, Union + +import numpy as np +from nibabel.loadsave import load as _nbload +from nibabel.arrayproxy import get_obj_dtype +from nibabel.spatialimages import SpatialImage +from scipy import ndimage as ndi + +from nitransforms.base import ( + ImageGrid, + TransformBase, + TransformError, + SpatialReference, + _as_homogeneous, +) + +R = TypeVar("R") + +SERIALIZE_VOLUME_WINDOW_WIDTH: int = 8 +"""Minimum number of volumes to automatically serialize 4D transforms.""" + + +async def worker(job: Callable[[], R], semaphore) -> R: + async with semaphore: + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, job) + + +async def _apply_serial( + data: np.ndarray, + spatialimage: SpatialImage, + targets: np.ndarray, + transform: TransformBase, + ref_ndim: int, + ref_ndcoords: np.ndarray, + n_resamplings: int, + output: np.ndarray, + input_dtype: np.dtype, + order: int = 3, + mode: str = "constant", + cval: float = 0.0, + prefilter: bool = True, + max_concurrent: int = min(cpu_count(), 12), +): + """ + Resample through a 
given transform serially, in a 3D+t setting. + + Parameters + ---------- + data : :obj:`~numpy.ndarray` + The input data array. + spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` + The image object containing the data to be resampled in reference + space + targets : :obj:`~numpy.ndarray` + The target coordinates for mapping. + transform : :obj:`~nitransforms.base.TransformBase` + The 3D, 3D+t, or 4D transform through which data will be resampled. + ref_ndim : :obj:`int` + Dimensionality of the resampling target (reference image). + ref_ndcoords : :obj:`~numpy.ndarray` + Physical coordinates (RAS+) where data will be interpolated, if the resampling + target is a grid, the scanner coordinates of all voxels. + n_resamplings : :obj:`int` + Total number of 3D resamplings (can be defined by the input image, the transform, + or be matched, that is, same number of volumes in the input and number of transforms). + output : :obj:`~numpy.ndarray` + The output data array where resampled values will be stored volume-by-volume. + order : :obj:`int`, optional + The order of the spline interpolation, default is 3. + The order has to be in the range 0-5. + mode : :obj:`str`, optional + Determines how the input image is extended when the resamplings overflows + a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``, + or ``'wrap'``. Default is ``'constant'``. + cval : :obj:`float`, optional + Constant value for ``mode='constant'``. Default is 0.0. + prefilter: :obj:`bool`, optional + Determines if the image's data array is prefiltered with + a spline filter before interpolation. The default is ``True``, + which will create a temporary *float64* array of filtered values + if *order > 1*. If setting this to ``False``, the output will be + slightly blurred if *order > 1*, unless the input is prefiltered, + i.e. it is the result of calling the spline filter on the original + input. 
+ + Returns + ------- + np.ndarray + Data resampled on the 3D+t array of input coordinates. + + """ + tasks = [] + semaphore = asyncio.Semaphore(max_concurrent) + + for t in range(n_resamplings): + xfm_t = transform if (n_resamplings == 1 or transform.ndim < 4) else transform[t] + + if targets is None: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(xfm_t.map(ref_ndcoords), dim=ref_ndim) + ) + + data_t = ( + data + if data is not None + else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) + ) + + tasks.append( + asyncio.create_task( + worker( + partial( + ndi.map_coordinates, + data_t, + targets, + output=output[..., t], + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ), + semaphore, + ) + ) + ) + await asyncio.gather(*tasks) + return output + + +def apply( + transform: TransformBase, + spatialimage: Union[str, Path, SpatialImage], + reference: Union[str, Path, SpatialImage] = None, + order: int = 3, + mode: str = "constant", + cval: float = 0.0, + prefilter: bool = True, + output_dtype: np.dtype = None, + dtype_width: int = 8, + serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, + max_concurrent: int = min(cpu_count(), 12), +) -> Union[SpatialImage, np.ndarray]: + """ + Apply a transformation to an image, resampling on the reference spatial object. + + Parameters + ---------- + transform: :obj:`~nitransforms.base.TransformBase` + The 3D, 3D+t, or 4D transform through which data will be resampled. + spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` + The image object containing the data to be resampled in reference + space + reference : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` + The image, surface, or combination thereof containing the coordinates + of samples that will be sampled. + order : :obj:`int`, optional + The order of the spline interpolation, default is 3. + The order has to be in the range 0-5. 
+ mode : :obj:`str`, optional + Determines how the input image is extended when the resamplings overflows + a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``, + or ``'wrap'``. Default is ``'constant'``. + cval : :obj:`float`, optional + Constant value for ``mode='constant'``. Default is 0.0. + prefilter : :obj:`bool`, optional + Determines if the image's data array is prefiltered with + a spline filter before interpolation. The default is ``True``, + which will create a temporary *float64* array of filtered values + if *order > 1*. If setting this to ``False``, the output will be + slightly blurred if *order > 1*, unless the input is prefiltered, + i.e. it is the result of calling the spline filter on the original + input. + output_dtype : :obj:`~numpy.dtype`, optional + The dtype of the returned array or image, if specified. + If ``None``, the default behavior is to use the effective dtype of + the input image. If slope and/or intercept are defined, the effective + dtype is float64, otherwise it is equivalent to the input image's + ``get_data_dtype()`` (on-disk type). + If ``reference`` is defined, then the return value is an image, with + a data array of the effective dtype but with the on-disk dtype set to + the input image's on-disk dtype. + dtype_width : :obj:`int` + Cap the width of the input data type to the given number of bytes. + This argument is intended to work as a way to implement lower memory + requirements in resampling. + serialize_nvols : :obj:`int` + Minimum number of volumes in a 3D+t (that is, a series of 3D transformations + independent in time) to resample on a one-by-one basis. + Serialized resampling can be executed concurrently (parallelized) with + the argument ``max_concurrent``. + max_concurrent : :obj:`int` + Maximum number of 3D resamplings to be executed concurrently. 
+ + Returns + ------- + resampled : :obj:`~nibabel.spatialimages.SpatialImage` or :obj:`~numpy.ndarray` + The data imaged after resampling to reference space. + + """ + if reference is not None and isinstance(reference, (str, Path)): + reference = _nbload(str(reference)) + + _ref = ( + transform.reference + if reference is None + else SpatialReference.factory(reference) + ) + + if _ref is None: + raise TransformError("Cannot apply transform without reference") + + if isinstance(spatialimage, (str, Path)): + spatialimage = _nbload(str(spatialimage)) + + # Avoid opening the data array just yet + input_dtype = cap_dtype(get_obj_dtype(spatialimage.dataobj), dtype_width) + + # Number of data volumes + data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1] + # Number of transforms: transforms chains (e.g., affine + field, are a single transform) + xfm_nvols = 1 if transform.ndim < 4 else len(transform) + + if data_nvols != xfm_nvols and min(data_nvols, xfm_nvols) > 1: + raise ValueError( + "The fourth dimension of the data does not match the transform's shape." 
+ ) + + serialize_nvols = ( + serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf + ) + n_resamplings = max(data_nvols, xfm_nvols) + serialize_4d = n_resamplings >= serialize_nvols + + targets = None + ref_ndcoords = _ref.ndcoords.T + if hasattr(transform, "to_field") and callable(transform.to_field): + targets = ImageGrid(spatialimage).index( + _as_homogeneous( + transform.to_field(reference=reference).map(ref_ndcoords), + dim=_ref.ndim, + ) + ) + elif xfm_nvols == 1: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim) + ) + + if serialize_4d: + data = ( + np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + if data_nvols == 1 + else None + ) + + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (len(ref_ndcoords), n_resamplings), dtype=input_dtype, order="F" + ) + + resampled = asyncio.run( + _apply_serial( + data, + spatialimage, + targets, + transform, + _ref.ndim, + ref_ndcoords, + n_resamplings, + resampled, + input_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + max_concurrent=max_concurrent, + ) + ) + else: + data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + + if targets is None: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim) + ) + + # Cast 3D data into 4D if 4D nonsequential transform + if data_nvols == 1 and xfm_nvols > 1: + data = data[..., np.newaxis] + + if transform.ndim == 4: + targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T + + resampled = ndi.map_coordinates( + data, + targets, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + + if isinstance(_ref, ImageGrid): # If reference is grid, reshape + hdr = ( + _ref.header.copy() + if _ref.header is not None + else spatialimage.header.__class__() + ) + 
hdr.set_data_dtype(output_dtype or spatialimage.header.get_data_dtype()) + + moved = spatialimage.__class__( + resampled.reshape(_ref.shape if n_resamplings == 1 else _ref.shape + (-1,)), + _ref.affine, + hdr, + ) + return moved + + output_dtype = output_dtype or input_dtype + return resampled.astype(output_dtype) + + +def cap_dtype(dt, nbytes): + """ + Cap the datatype size to shave off memory requirements. + + Examples + -------- + >>> cap_dtype(np.dtype('f8'), 4) + dtype('float32') + + >>> cap_dtype(np.dtype('f8'), 16) + dtype('float64') + + >>> cap_dtype('float64', 4) + dtype('float32') + + >>> cap_dtype(np.dtype('i1'), 4) + dtype('int8') + + >>> cap_dtype('int8', 4) + dtype('int8') + + >>> cap_dtype('int32', 1) + dtype('int8') + + >>> cap_dtype(np.dtype('i8'), 4) + dtype('int32') + + """ + dt = np.dtype(dt) + return np.dtype(f"{dt.byteorder}{dt.kind}{min(nbytes, dt.itemsize)}") diff --git a/nitransforms/surface.py b/nitransforms/surface.py new file mode 100644 index 00000000..7e1e7116 --- /dev/null +++ b/nitransforms/surface.py @@ -0,0 +1,652 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the NiBabel package for the +# copyright and license terms. 
+# +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +"""Surface transforms.""" +import pathlib +import warnings +import h5py +import numpy as np +import nibabel as nb +from scipy import sparse +from scipy.spatial import KDTree +from scipy.spatial.distance import cdist +from nitransforms.base import ( + SurfaceMesh +) + + +class SurfaceTransformBase(): + """Generic surface transformation class""" + + def __init__(self, reference, moving, spherical=False): + """Instantiate a generic surface transform.""" + if spherical: + if not reference.check_sphere(): + raise ValueError("reference was not spherical") + if not moving.check_sphere(): + raise ValueError("moving was not spherical") + reference.set_radius() + moving.set_radius() + self._reference = reference + self._moving = moving + + def __eq__(self, other): + ref_coords_eq = np.all(self.reference._coords == other.reference._coords) + ref_tris_eq = np.all(self.reference._triangles == other.reference._triangles) + mov_coords_eq = np.all(self.moving._coords == other.moving._coords) + mov_tris_eq = np.all(self.moving._triangles == other.moving._triangles) + return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq + + def __invert__(self): + return self.__class__(self._moving, self._reference) + + @property + def reference(self): + return self._reference + + @reference.setter + def reference(self, surface): + self._reference = SurfaceMesh(surface) + + @property + def moving(self): + return self._moving + + @moving.setter + def moving(self, surface): + self._moving = SurfaceMesh(surface) + + @classmethod + def from_filename(cls, reference_path, moving_path): + """Create an Surface Index Transformation from a pair of surfaces with corresponding + vertices.""" + reference = SurfaceMesh(nb.load(reference_path)) + moving = SurfaceMesh(nb.load(moving_path)) + return cls(reference, moving) + + +class SurfaceCoordinateTransform(SurfaceTransformBase): + """Represents surface transformations 
in which the indices correspond and the coordinates + differ. This could be two surfaces representing difference structures from the same + hemisphere, like white matter and pial, or it could be a sphere and a deformed sphere that + moves those coordinates to a different location.""" + + __slots__ = ("_reference", "_moving") + + def __init__(self, reference, moving): + """Instantiate a transform between two surfaces with corresponding vertices. + Parameters + ---------- + reference: surface + Surface with the starting coordinates for each index. + moving: surface + Surface with the destination coordinates for each index. + """ + + super().__init__(reference=SurfaceMesh(reference), moving=SurfaceMesh(moving)) + if np.all(self._reference._triangles != self._moving._triangles): + raise ValueError("Both surfaces for an index transform must have corresponding" + " vertices.") + + def map(self, x, inverse=False): + if not inverse: + source = self.reference + dest = self.moving + else: + source = self.moving + dest = self.reference + + s_tree = KDTree(source._coords) + dists, matches = s_tree.query(x) + if not np.allclose(dists, 0): + raise NotImplementedError("Mapping on surfaces not implemented for coordinates that" + " aren't vertices") + return dest._coords[matches] + + def __add__(self, other): + if isinstance(other, SurfaceCoordinateTransform): + return self.__class__(self.reference, other.moving) + raise NotImplementedError + + def _to_hdf5(self, x5_root): + """Write transform to HDF5 file.""" + triangles = x5_root.create_group("Triangles") + coords = x5_root.create_group("Coordinates") + coords.create_dataset("0", data=self.reference._coords) + coords.create_dataset("1", data=self.moving._coords) + triangles.create_dataset("0", data=self.reference._triangles) + xform = x5_root.create_group("Transform") + xform.attrs["Type"] = "SurfaceCoordinateTransform" + reference = xform.create_group("Reference") + reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0') + 
reference['Triangles'] = h5py.SoftLink('/0/Triangles/0') + moving = xform.create_group("Moving") + moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1') + moving['Triangles'] = h5py.SoftLink('/0/Triangles/0') + + def to_filename(self, filename, fmt=None): + """Store the transform.""" + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # sparse.save_npz(filename, self.mat) + # return filename + + with h5py.File(filename, "w") as out_file: + out_file.attrs["Format"] = "X5" + out_file.attrs["Version"] = np.uint16(1) + root = out_file.create_group("/0") + self._to_hdf5(root) + + return filename + + @classmethod + def from_filename(cls, filename=None, reference_path=None, moving_path=None, + fmt=None): + """Load transform from file.""" + if filename is None: + if reference_path is None or moving_path is None: + raise ValueError("You must pass either a X5 file or a pair of reference and moving" + " surfaces.") + return cls(SurfaceMesh(nb.load(reference_path)), + SurfaceMesh(nb.load(moving_path))) + + if fmt is None: + try: + fmt = "npz" if filename.endswith(".npz") else "X5" + except AttributeError: + fmt = "npz" if filename.as_posix().endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # return cls(sparse.load_npz(filename)) + + if fmt != "X5": + raise ValueError("Only npz and X5 formats are supported.") + + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + reference = SurfaceMesh.from_arrays( + xform['Reference']['Coordinates'], + xform['Reference']['Triangles'] + ) + + moving = SurfaceMesh.from_arrays( + xform['Moving']['Coordinates'], + xform['Moving']['Triangles'] + ) + return cls(reference, moving) + + +class SurfaceResampler(SurfaceTransformBase): + """ + Represents transformations in which the coordinate space remains the same + and the indices change. 
+ To achieve surface project-unproject functionality: + sphere_in as the reference + sphere_project_to as the moving + Then apply the transformation to sphere_unproject_from + """ + + __slots__ = ("_reference", "_moving", "mat", 'interpolation_method') + + def __init__(self, reference, moving, interpolation_method='barycentric', mat=None): + """Initialize the resampling. + + Parameters + ---------- + reference: spherical surface of the reference space. + Output will have number of indices equal to the number of indicies in this surface. + Both reference and moving should be in the same coordinate space. + moving: spherical surface that will be resampled. + Both reference and moving should be in the same coordinate space. + mat : array-like, shape (nv1, nv2) + Sparse matrix representing the transform. + interpolation_method : str + Only barycentric is currently implemented + """ + + super().__init__(SurfaceMesh(reference), SurfaceMesh(moving), spherical=True) + + self.reference.set_radius() + self.moving.set_radius() + if interpolation_method not in ['barycentric']: + raise NotImplementedError(f"{interpolation_method} is not implemented.") + self.interpolation_method = interpolation_method + + # TODO: should we deal with the case where reference and moving are the same? 
+ + # we're calculating the interpolation in the init so that we can ensure + # that it only has to be calculated once and will always be saved with the + # transform + if mat is None: + self.__calculate_mat() + m_tree = KDTree(self.moving._coords) + _, kmr_closest = m_tree.query(self.reference._coords, k=10) + + # invert the triangles to generate a lookup table from vertices to triangle index + tri_lut = {} + for i, idxs in enumerate(self.moving._triangles): + for x in idxs: + if x not in tri_lut: + tri_lut[x] = [i] + else: + tri_lut[x].append(i) + + # calculate the barycentric interpolation weights + bc_weights = [] + enclosing = [] + for point, kmrv in zip(self.reference._coords, kmr_closest): + close_tris = _find_close_tris(kmrv, tri_lut, self.moving) + ww, ee = _find_weights(point, close_tris, m_tree) + bc_weights.append(ww) + enclosing.append(ee) + + # build sparse matrix + # commenting out code for barycentric nearest neighbor + # bary_nearest = [] + mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) + for s_ix, dd in enumerate(bc_weights): + for k, v in dd.items(): + mat[s_ix, k] = v + # bary_nearest.append(np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()]) + # bary_nearest = np.array(bary_nearest) + # transpose so that number of out vertices is columns + self.mat = sparse.csr_array(mat.T) + else: + if isinstance(mat, sparse.csr_array): + self.mat = mat + else: + self.mat = sparse.csr_array(mat) + # validate shape of the provided matrix + if (mat.shape[0] != moving._npoints) or (mat.shape[1] != reference._npoints): + msg = "Shape of provided mat does not match expectations based on " \ + "dimensions of moving and reference. \n" + if mat.shape[0] != moving._npoints: + msg += f" mat has {mat.shape[0]} rows but moving has {moving._npoints} " \ + f"vertices. \n" + if mat.shape[1] != reference._npoints: + msg += f" mat has {mat.shape[1]} columns but reference has" \ + f" {reference._npoints} vertices." 
+ raise ValueError(msg) + + def __calculate_mat(self): + m_tree = KDTree(self.moving._coords) + _, kmr_closest = m_tree.query(self.reference._coords, k=10) + + # invert the triangles to generate a lookup table from vertices to triangle index + tri_lut = {} + for i, idxs in enumerate(self.moving._triangles): + for x in idxs: + if x not in tri_lut: + tri_lut[x] = [i] + else: + tri_lut[x].append(i) + + # calculate the barycentric interpolation weights + bc_weights = [] + enclosing = [] + for point, kmrv in zip(self.reference._coords, kmr_closest): + close_tris = _find_close_tris(kmrv, tri_lut, self.moving) + ww, ee = _find_weights(point, close_tris, m_tree) + bc_weights.append(ww) + enclosing.append(ee) + + # build sparse matrix + # commenting out code for barycentric nearest neighbor + # bary_nearest = [] + mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) + for s_ix, dd in enumerate(bc_weights): + for k, v in dd.items(): + mat[s_ix, k] = v + # bary_nearest.append( + # np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()] + # ) + # bary_nearest = np.array(bary_nearest) + # transpose so that number of out vertices is columns + self.mat = sparse.csr_array(mat.T) + + def map(self, x): + return x + + def __add__(self, other): + if (isinstance(other, SurfaceResampler) + and (other.interpolation_method == self.interpolation_method)): + return self.__class__( + self.reference, + other.moving, + interpolation_method=self.interpolation_method + ) + raise NotImplementedError + + def __invert__(self): + return self.__class__( + self.moving, + self.reference, + interpolation_method=self.interpolation_method + ) + + @SurfaceTransformBase.reference.setter + def reference(self, surface): + raise ValueError("Don't modify the reference of an existing resampling." + "Create a new one instead.") + + @SurfaceTransformBase.moving.setter + def moving(self, surface): + raise ValueError("Don't modify the moving of an existing resampling." 
+ "Create a new one instead.") + + def apply(self, x, inverse=False, normalize="element"): + """Apply the transform to surface data. + + Parameters + ---------- + x : array-like, shape (..., nv1), or SurfaceMesh + Data to transform or SurfaceMesh to resample + inverse : bool, default=False + Whether to apply the inverse transform. If True, ``x`` has shape + (..., nv2), and the output will have shape (..., nv1). + normalize : {"element", "sum", "none"}, default="element" + Normalization strategy. If "element", the scale of each value in + the output is comparable to each value of the input. If "sum", the + sum of the output is comparable to the sum of the input. If + "none", no normalization is applied. + + Returns + ------- + y : array-like, shape (..., nv2) + Transformed data. + """ + if normalize not in ("element", "sum", "none"): + raise ValueError("Invalid normalization strategy.") + + mat = self.mat.T if inverse else self.mat + + if normalize == "element": + sum_ = mat.sum(axis=0) + scale = np.zeros_like(sum_) + mask = sum_ != 0 + scale[mask] = 1.0 / sum_[mask] + mat = mat @ sparse.diags(scale) + elif normalize == "sum": + sum_ = mat.sum(axis=1) + scale = np.zeros_like(sum_) + mask = sum_ != 0 + scale[mask] = 1.0 / sum_[mask] + mat = sparse.diags(scale) @ mat + + if isinstance(x, (SurfaceMesh, pathlib.PurePath, str)): + x = SurfaceMesh(x) + if not x.check_sphere(): + raise ValueError("If x is a surface, it should be a sphere.") + x.set_radius() + rs_coords = x._coords.T @ mat + + y = SurfaceMesh.from_arrays(rs_coords.T, self.reference._triangles) + y.set_radius() + else: + y = x @ mat + return y + + def _to_hdf5(self, x5_root): + """Write transform to HDF5 file.""" + triangles = x5_root.create_group("Triangles") + coords = x5_root.create_group("Coordinates") + coords.create_dataset("0", data=self.reference._coords) + coords.create_dataset("1", data=self.moving._coords) + triangles.create_dataset("0", data=self.reference._triangles) + 
triangles.create_dataset("1", data=self.moving._triangles) + xform = x5_root.create_group("Transform") + xform.attrs["Type"] = "SurfaceResampling" + xform.attrs['InterpolationMethod'] = self.interpolation_method + mat = xform.create_group("IndexWeights") + mat.create_dataset("Data", data=self.mat.data) + mat.create_dataset("Indices", data=self.mat.indices) + mat.create_dataset("Indptr", data=self.mat.indptr) + mat.create_dataset("Shape", data=self.mat.shape) + reference = xform.create_group("Reference") + reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0') + reference['Triangles'] = h5py.SoftLink('/0/Triangles/0') + moving = xform.create_group("Moving") + moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1') + moving['Triangles'] = h5py.SoftLink('/0/Triangles/1') + + def to_filename(self, filename, fmt=None): + """Store the transform.""" + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # sparse.save_npz(filename, self.mat) + # return filename + + with h5py.File(filename, "w") as out_file: + out_file.attrs["Format"] = "X5" + out_file.attrs["Version"] = np.uint16(1) + root = out_file.create_group("/0") + self._to_hdf5(root) + + return filename + + @classmethod + def from_filename(cls, filename=None, reference_path=None, moving_path=None, + fmt=None, interpolation_method=None): + """Load transform from file.""" + if filename is None: + if reference_path is None or moving_path is None: + raise ValueError("You must pass either a X5 file or a pair of reference and moving" + " surfaces.") + if interpolation_method is None: + interpolation_method = 'barycentric' + return cls(SurfaceMesh(nb.load(reference_path)), + SurfaceMesh(nb.load(moving_path)), + interpolation_method=interpolation_method) + + if fmt is None: + try: + fmt = "npz" if filename.endswith(".npz") else "X5" + except AttributeError: + fmt = "npz" if filename.as_posix().endswith(".npz") else "X5" + + if fmt == "npz": + raise 
NotImplementedError + # return cls(sparse.load_npz(filename)) + + if fmt != "X5": + raise ValueError("Only npz and X5 formats are supported.") + + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + try: + iws = xform['IndexWeights'] + mat = sparse.csr_matrix( + (iws["Data"][()], iws["Indices"][()], iws["Indptr"][()]), + shape=iws["Shape"][()], + ) + except KeyError: + mat = None + reference = SurfaceMesh.from_arrays( + xform['Reference']['Coordinates'], + xform['Reference']['Triangles'] + ) + + moving = SurfaceMesh.from_arrays( + xform['Moving']['Coordinates'], + xform['Moving']['Triangles'] + ) + interpolation_method = xform.attrs['InterpolationMethod'] + return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) + + +def _points_to_triangles(points, triangles): + + """Implementation that vectorizes project of a point to a set of triangles. + from: https://stackoverflow.com/a/32529589 + """ + with np.errstate(all='ignore'): + # Unpack triangle points + p0, p1, p2 = np.asarray(triangles).swapaxes(0, 1) + + # Calculate triangle edges + e0 = p1 - p0 + e1 = p2 - p0 + a = np.einsum('...i,...i', e0, e0) + b = np.einsum('...i,...i', e0, e1) + c = np.einsum('...i,...i', e1, e1) + + # Calculate determinant and denominator + det = a * c - b * b + inv_det = 1. 
/ det + denom = a - 2 * b + c + + # Project to the edges + p = p0 - points[:, np.newaxis] + d = np.einsum('...i,...i', e0, p) + e = np.einsum('...i,...i', e1, p) + u = b * e - c * d + v = b * d - a * e + + # Calculate numerators + bd = b + d + ce = c + e + numer0 = (ce - bd) / denom + numer1 = (c + e - b - d) / denom + da = -d / a + ec = -e / c + + # Vectorize test conditions + m0 = u + v < det + m1 = u < 0 + m2 = v < 0 + m3 = d < 0 + m4 = a + d > b + e + + m5 = ce > bd + + t0 = m0 & m1 & m2 & m3 + t1 = m0 & m1 & m2 & ~m3 + t2 = m0 & m1 & ~m2 + t3 = m0 & ~m1 & m2 + t4 = m0 & ~m1 & ~m2 + t5 = ~m0 & m1 & m5 + t6 = ~m0 & m1 & ~m5 + t7 = ~m0 & m2 & m4 + t8 = ~m0 & m2 & ~m4 + t9 = ~m0 & ~m1 & ~m2 + + u = np.where(t0, np.clip(da, 0, 1), u) + v = np.where(t0, 0, v) + u = np.where(t1, 0, u) + v = np.where(t1, 0, v) + u = np.where(t2, 0, u) + v = np.where(t2, np.clip(ec, 0, 1), v) + u = np.where(t3, np.clip(da, 0, 1), u) + v = np.where(t3, 0, v) + u *= np.where(t4, inv_det, 1) + v *= np.where(t4, inv_det, 1) + u = np.where(t5, np.clip(numer0, 0, 1), u) + v = np.where(t5, 1 - u, v) + u = np.where(t6, 0, u) + v = np.where(t6, 1, v) + u = np.where(t7, np.clip(numer1, 0, 1), u) + v = np.where(t7, 1 - u, v) + u = np.where(t8, 1, u) + v = np.where(t8, 0, v) + u = np.where(t9, np.clip(numer1, 0, 1), u) + v = np.where(t9, 1 - u, v) + + # Return closest points + return (p0.T + u[:, np.newaxis] * e0.T + v[:, np.newaxis] * e1.T).swapaxes(2, 1) + + +def _barycentric_weights(vecs, coords): + """Compute the weights for barycentric interpolation. + + Parameters + ---------- + vecs : ndarray of shape (6, 3) + The 6 vectors used to compute barycentric weights. + a, e1, e2, + np.cross(e1, e2), + np.cross(e2, a), + np.cross(a, e1) + coords : ndarray of shape (3, ) + + Returns + ------- + (w, u, v, t) : tuple of float + ``w``, ``u``, and ``v`` are the weights of the three vertices of the + triangle, respectively. 
``t`` is the scale that needs to be multiplied + to ``coords`` to make it in the same plane as the three vertices. + + From: https://github.com/neuroboros/neuroboros/blob/\ +f2a2efb914e783add2bf06e0f3715236d3d8550e/src/neuroboros/surface/_barycentric.pyx#L9-L47 + """ + det = coords[0] * vecs[3, 0] + coords[1] * vecs[3, 1] + coords[2] * vecs[3, 2] + if det == 0: + if vecs[3, 0] == 0 and vecs[3, 1] == 0 and vecs[3, 2] == 0: + warnings.warn("Zero cross product of two edges: " + "The three vertices are in the same line.") + else: + print(vecs[3]) + y = coords - vecs[0] + u, v = np.linalg.lstsq(vecs[1:3].T, y, rcond=None)[0] + t = 1. + else: + uu = coords[0] * vecs[4, 0] + coords[1] * vecs[4, 1] + coords[2] * vecs[4, 2] + vv = coords[0] * vecs[5, 0] + coords[1] * vecs[5, 1] + coords[2] * vecs[5, 2] + u = uu / det + v = vv / det + tt = vecs[0, 0] * vecs[3, 0] + vecs[0, 1] * vecs[3, 1] + vecs[0, 2] * vecs[3, 2] + t = tt / det + w = 1. - (u + v) + return w, u, v, t + + +def _find_close_tris(kdsv, tri_lut, surface): + tris = [] + for kk in kdsv: + tris.extend(tri_lut[kk]) + close_tri_verts = surface._triangles[np.unique(tris)] + close_tris = surface._coords[close_tri_verts] + return close_tris + + +def _find_weights(point, close_tris, d_tree): + point = point[np.newaxis, :] + tri_dists = cdist(point, _points_to_triangles(point, close_tris).squeeze()) + + closest_tri = close_tris[(tri_dists == tri_dists.min()).squeeze()] + # make sure a single closest triangle was found + if closest_tri.shape[0] != 1: + # in the event of a tie (which can happen) + # just take the first triangle + closest_tri = closest_tri[0] + + closest_tri = closest_tri.squeeze() + # Make sure point is actually inside triangle + enclosing = True + if np.all((point > closest_tri).sum(0) != 3): + + enclosing = False + _, ct_idxs = d_tree.query(closest_tri) + a = closest_tri[0] + e1 = closest_tri[1] - a + e2 = closest_tri[2] - a + vecs = np.vstack([a, e1, e2, np.cross(e1, e2), np.cross(e2, a), np.cross(a, 
e1)]) + res = {} + res[ct_idxs[0]], res[ct_idxs[1]], res[ct_idxs[2]], _ = _barycentric_weights( + vecs, + point.squeeze() + ) + return res, enclosing diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 07a7e4ec..49d7f7af 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -1,11 +1,22 @@ """Tests of the base module.""" + import numpy as np import nibabel as nb +from nibabel.arrayproxy import get_obj_dtype + import pytest import h5py -from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase + +from ..base import ( + SpatialReference, + SampledSpatialData, + ImageGrid, + TransformBase, + SurfaceMesh, +) from .. import linear as nitl +from ..resampling import apply def test_SpatialReference(testdata_path): @@ -42,10 +53,10 @@ def test_ImageGrid(get_testdata, image_orientation): ijk = [[10, 10, 10], [40, 4, 20], [0, 0, 0], [s - 1 for s in im.shape[:3]]] xyz = [img._affine.dot(idx + [1])[:-1] for idx in ijk] - assert np.allclose(img.ras(ijk[0]), xyz[0]) + assert np.allclose(np.squeeze(img.ras(ijk[0])), xyz[0]) assert np.allclose(np.round(img.index(xyz[0])), ijk[0]) - assert np.allclose(img.ras(ijk), xyz) - assert np.allclose(np.round(img.index(xyz)), ijk) + assert np.allclose(img.ras(ijk).T, xyz) + assert np.allclose(np.round(img.index(xyz)).T, ijk) # nd index / coords idxs = img.ndindex @@ -89,33 +100,28 @@ def _to_hdf5(klass, x5_root): fname = testdata_path / "someones_anatomy.nii.gz" img = nb.load(fname) - imgdata = np.asanyarray(img.dataobj, dtype=img.get_data_dtype()) + imgdata = np.asanyarray(img.dataobj, dtype=get_obj_dtype(img.dataobj)) - # Test identity transform + # Test identity transform - setting reference xfm = TransformBase() - xfm.reference = fname with pytest.raises(TypeError): _ = xfm.ndim - moved = xfm.apply(fname, order=0) - assert np.all( - imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) - ) - # Test identity transform - setting reference 
- xfm = TransformBase() + # Test to_filename + xfm.to_filename("data.x5") + + # Test identity transform + xfm = nitl.Affine() xfm.reference = fname - with pytest.raises(TypeError): - _ = xfm.ndim - moved = xfm.apply(str(fname), reference=fname, order=0) + moved = apply(xfm, fname, order=0) + assert np.all( - imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) + imgdata == np.asanyarray(moved.dataobj, dtype=get_obj_dtype(moved.dataobj)) ) # Test ndim returned by affine assert nitl.Affine().ndim == 3 - assert nitl.LinearTransformsMapping( - [nitl.Affine(), nitl.Affine()] - ).ndim == 4 + assert nitl.LinearTransformsMapping([nitl.Affine(), nitl.Affine()]).ndim == 4 # Test applying to Gifti gii = nb.gifti.GiftiImage( @@ -126,11 +132,11 @@ def _to_hdf5(klass, x5_root): ) ] ) - giimoved = xfm.apply(fname, reference=gii, order=0) + giimoved = apply(xfm, fname, reference=gii, order=0) assert np.allclose(giimoved.reshape(xfm.reference.shape), moved.get_fdata()) # Test to_filename - xfm.to_filename("data.x5") + xfm.to_filename("data.xfm", fmt="itk") def test_SampledSpatialData(testdata_path): @@ -161,3 +167,49 @@ def test_concatenation(testdata_path): x = [(0.0, 0.0, 0.0), (1.0, 1.0, 1.0), (-1.0, -1.0, -1.0)] assert np.all((aff + nitl.Affine())(x) == x) assert np.all((aff + nitl.Affine())(x, inverse=True) == x) + + +def test_SurfaceMesh(testdata_path): + surf_path = testdata_path / "sub-200148_hemi-R_pial.surf.gii" + shape_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + ) + img_path = testdata_path / "bold.nii.gz" + + mesh = SurfaceMesh(nb.load(surf_path)) + exp_coords_shape = (249277, 3) + exp_tris_shape = (498550, 3) + assert mesh._coords.shape == exp_coords_shape + assert mesh._triangles.shape == exp_tris_shape + assert mesh._npoints == exp_coords_shape[0] + assert mesh._ndim == exp_coords_shape[1] + + mfd = SurfaceMesh(surf_path) + assert (mfd._coords == mesh._coords).all() + assert (mfd._triangles == 
mesh._triangles).all() + + mfsm = SurfaceMesh(mfd) + assert (mfd._coords == mfsm._coords).all() + assert (mfd._triangles == mfsm._triangles).all() + + with pytest.raises(ValueError): + SurfaceMesh(nb.load(img_path)) + + with pytest.raises(TypeError): + SurfaceMesh(nb.load(shape_path)) + + +def test_apply_deprecation(monkeypatch): + """Make sure a deprecation warning is issued.""" + from nitransforms import resampling + + def _retval(*args, **kwargs): + return 1 + + monkeypatch.setattr(resampling, "apply", _retval) + + with pytest.deprecated_call(): + retval = TransformBase().apply() + + assert retval == 1 diff --git a/nitransforms/tests/test_cli.py b/nitransforms/tests/test_cli.py index 7f16a1de..58867131 100644 --- a/nitransforms/tests/test_cli.py +++ b/nitransforms/tests/test_cli.py @@ -1,10 +1,18 @@ +import os from textwrap import dedent import pytest from ..cli import cli_apply, main as ntcli +if os.getenv("PYTEST_XDIST_WORKER"): + breaks_on_xdist = pytest.mark.skip(reason="xdist is active; rerun without to run this test.") +else: + def breaks_on_xdist(test): + return test + +@breaks_on_xdist def test_cli(capsys): # empty command with pytest.raises(SystemExit): diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index bcee9198..0cc79d15 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -28,6 +28,8 @@ ) from nitransforms.io.base import LinearParameters, TransformIOError, TransformFileError from nitransforms.conftest import _datadir, _testdir +from nitransforms.resampling import apply + LPS = np.diag([-1, -1, 1, 1]) ITK_MAT = LPS.dot(np.ones((4, 4)).dot(LPS)) @@ -497,10 +499,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.allclose(card_aff, nb.load("deob_3drefit.nii.gz").affine) # Check that nitransforms can emulate 3drefit -deoblique - nt3drefit = Affine( - afni._cardinal_rotation(img.affine, False), - reference="deob_3drefit.nii.gz", - ).apply("orig.nii.gz") 
+ nt3drefit = apply( + Affine( + afni._cardinal_rotation(img.affine, False), + reference="deob_3drefit.nii.gz", + ), + "orig.nii.gz", + ) diff = ( np.asanyarray(img.dataobj, dtype="uint8") @@ -509,10 +514,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.sqrt((diff[10:-10, 10:-10, 10:-10] ** 2).mean()) < 0.1 # Check that nitransforms can revert 3drefit -deoblique - nt_undo3drefit = Affine( - afni._cardinal_rotation(img.affine, True), - reference="orig.nii.gz", - ).apply("deob_3drefit.nii.gz") + nt_undo3drefit = apply( + Affine( + afni._cardinal_rotation(img.affine, True), + reference="orig.nii.gz", + ), + "deob_3drefit.nii.gz", + ) diff = ( np.asanyarray(img.dataobj, dtype="uint8") @@ -531,16 +539,21 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.allclose(deobaff, deobnii.affine) # Check resampling in deobliqued grid - ntdeobnii = Affine(np.eye(4), reference=deobnii.__class__( - np.zeros(deobshape, dtype="uint8"), - deobaff, - deobnii.header - )).apply(img, order=0) + ntdeobnii = apply( + Affine(np.eye(4), reference=deobnii.__class__( + np.zeros(deobshape, dtype="uint8"), + deobaff, + deobnii.header + )), + img, + order=0, + ) # Generate an internal box to exclude border effects box = np.zeros(img.shape, dtype="uint8") box[10:-10, 10:-10, 10:-10] = 1 - ntdeobmask = Affine(np.eye(4), reference=ntdeobnii).apply( + ntdeobmask = apply( + Affine(np.eye(4), reference=ntdeobnii), nb.Nifti1Image(box, img.affine, img.header), order=0, ) diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index 2957f59c..31627159 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -1,41 +1,26 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of linear transforms.""" -import os + import pytest import numpy as np -from subprocess import check_call -import 
shutil import h5py -import nibabel as nb from nibabel.eulerangles import euler2mat from nibabel.affines import from_matvec from nitransforms import linear as nitl from nitransforms import io from .utils import assert_affines_by_filename -RMSE_TOL = 0.1 -APPLY_LINEAR_CMD = { - "fsl": """\ -flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \ --applyxfm -init {transform} -out {resampled}\ -""".format, - "itk": """\ -antsApplyTransforms -d 3 -r {reference} -i {moving} \ --o {resampled} -n NearestNeighbor -t {transform} --float\ -""".format, - "afni": """\ -3dAllineate -base {reference} -input {moving} \ --prefix {resampled} -1Dmatrix_apply {transform} -final NN\ -""".format, - "fs": """\ -mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \ ---o {resampled} --nearest""".format, -} - - -@pytest.mark.parametrize("matrix", [[0.0], np.ones((3, 3, 3)), np.ones((3, 4)), ]) + +@pytest.mark.parametrize( + "matrix", + [ + [0.0], + np.ones((3, 3, 3)), + np.ones((3, 4)), + ], +) def test_linear_typeerrors1(matrix): """Exercise errors in Affine creation.""" with pytest.raises(TypeError): @@ -97,6 +82,20 @@ def test_loadsave_itk(tmp_path, data_path, testdata_path): ) +def test_mapping_chain(data_path): + xfm = nitl.load(data_path / "itktflist2.tfm", fmt="itk") + xfm = nitl.load(data_path / "itktflist2.tfm", fmt="itk") + assert len(xfm) == 9 + + # Addiition produces a chain + chain = xfm + xfm + # Length now means number of transforms, not number of affines in one transform + assert len(chain) == 2 + # Just because a LinearTransformsMapping is iterable does not mean we decompose it + chain += xfm + assert len(chain) == 3 + + @pytest.mark.parametrize( "image_orientation", [ @@ -157,7 +156,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, 
moving=ref_file + ).matrix, ) else: assert xfm == nitl.load(fname, fmt=supplied_fmt, reference=ref_file) @@ -167,7 +168,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -181,7 +184,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -191,7 +196,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -211,12 +218,15 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool T = np.linalg.inv(T) xfm = ( - nitl.Affine(T) if (sw_tool, image_orientation) != ("afni", "oblique") else + nitl.Affine(T) + if (sw_tool, image_orientation) != ("afni", "oblique") # AFNI is special when moving or reference are oblique - let io do the magic - nitl.Affine(io.afni.AFNILinearTransform.from_ras(T).to_ras( - reference=img, - moving=img, - )) + else nitl.Affine( + io.afni.AFNILinearTransform.from_ras(T).to_ras( + reference=img, + moving=img, + ) + ) ) xfm.reference = img @@ -233,96 +243,6 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool assert_affines_by_filename(xfm_fname1, xfm_fname2) 
-@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", 'oblique', ]) -@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"]) -def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orientation, sw_tool): - """Check implementation of exporting affines to formats.""" - tmpdir.chdir() - - img = get_testdata[image_orientation] - msk = get_testmask[image_orientation] - - # Generate test transform - T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) - xfm = nitl.Affine(T) - xfm.reference = img - - ext = "" - if sw_tool == "itk": - ext = ".tfm" - elif sw_tool == "fs": - ext = ".lta" - - img.to_filename("img.nii.gz") - msk.to_filename("mask.nii.gz") - - # Write out transform file (software-dependent) - xfm_fname = f"M.{sw_tool}{ext}" - # Change reference dataset for AFNI & oblique - if (sw_tool, image_orientation) == ("afni", "oblique"): - io.afni.AFNILinearTransform.from_ras( - T, - moving=img, - reference=img, - ).to_filename(xfm_fname) - else: - xfm.to_filename(xfm_fname, fmt=sw_tool) - - cmd = APPLY_LINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=os.path.abspath("mask.nii.gz"), - moving=os.path.abspath("mask.nii.gz"), - resampled=os.path.abspath("resampled_brainmask.nii.gz"), - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - # resample mask - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - - nt_moved_mask = xfm.apply(msk, order=0) - nt_moved_mask.set_data_dtype(msk.get_data_dtype()) - nt_moved_mask.to_filename("ntmask.nii.gz") - diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL - brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) - - cmd = APPLY_LINEAR_CMD[sw_tool]( - 
transform=os.path.abspath(xfm_fname), - reference=os.path.abspath("img.nii.gz"), - moving=os.path.abspath("img.nii.gz"), - resampled=os.path.abspath("resampled.nii.gz"), - ) - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - sw_moved.set_data_dtype(img.get_data_dtype()) - - nt_moved = xfm.apply(img, order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) - - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - nt_moved = xfm.apply("img.nii.gz", order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - def test_Affine_to_x5(tmpdir, testdata_path): """Test affine's operations.""" tmpdir.chdir() @@ -335,36 +255,6 @@ def test_Affine_to_x5(tmpdir, testdata_path): aff._to_hdf5(f.create_group("Affine")) -def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): - """Apply transform mappings.""" - hmc = nitl.load( - data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz" - ) - assert isinstance(hmc, nitl.LinearTransformsMapping) - - # Test-case: realign functional data on to sbref - nii = hmc.apply( - testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz" - ) - assert nii.dataobj.shape[-1] == len(hmc) - - # Test-case: write out a fieldmap moved with head - hmcinv = nitl.LinearTransformsMapping( - np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" - ) - nii = hmcinv.apply(testdata_path / "fmap.nii.gz", order=1) - assert nii.dataobj.shape[-1] == len(hmc) - - # Ensure a ValueError is issued when trying to do weird stuff - 
hmc = nitl.LinearTransformsMapping(hmc.matrix[:1, ...]) - with pytest.raises(ValueError): - hmc.apply( - testdata_path / "func.nii.gz", - order=1, - reference=testdata_path / "sbref.nii.gz", - ) - - def test_mulmat_operator(testdata_path): """Check the @ operator.""" ref = testdata_path / "someones_anatomy.nii.gz" diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py index 6dee540e..b5dd5c62 100644 --- a/nitransforms/tests/test_manip.py +++ b/nitransforms/tests/test_manip.py @@ -1,66 +1,16 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of nonlinear transforms.""" -import os -import shutil -from subprocess import check_call + import pytest import numpy as np -import nibabel as nb -from ..manip import load as _load, TransformChain +from ..manip import TransformChain from ..linear import Affine -from .test_nonlinear import ( - RMSE_TOL, - APPLY_NONLINEAR_CMD, -) FMT = {"lta": "fs", "tfm": "itk"} -def test_itk_h5(tmp_path, testdata_path): - """Check a translation-only field on one or more axes, different image orientations.""" - os.chdir(str(tmp_path)) - img_fname = testdata_path / "T1w_scanner.nii.gz" - xfm_fname = ( - testdata_path - / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5" - ) - - xfm = _load(xfm_fname) - - assert len(xfm) == 2 - - ref_fname = tmp_path / "reference.nii.gz" - nb.Nifti1Image( - np.zeros(xfm.reference.shape, dtype="uint16"), xfm.reference.affine, - ).to_filename(str(ref_fname)) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD["itk"]( - transform=xfm_fname, - reference=ref_fname, - moving=img_fname, - output="resampled.nii.gz", - extra="", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = 
nb.load("resampled.nii.gz") - - nt_moved = xfm.apply(img_fname, order=0) - nt_moved.to_filename("nt_resampled.nii.gz") - diff = sw_moved.get_fdata() - nt_moved.get_fdata() - # A certain tolerance is necessary because of resampling at borders - assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL - - @pytest.mark.parametrize("ext0", ["lta", "tfm"]) @pytest.mark.parametrize("ext1", ["lta", "tfm"]) @pytest.mark.parametrize("ext2", ["lta", "tfm"]) diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 93d3fd4c..6112f633 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -1,44 +1,29 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of nonlinear transforms.""" + import os -import shutil -from subprocess import check_call import pytest import numpy as np import nibabel as nb +from nitransforms.resampling import apply from nitransforms.base import TransformError from nitransforms.io.base import TransformFileError from nitransforms.nonlinear import ( BSplineFieldTransform, DenseFieldTransform, - load as nlload, ) from ..io.itk import ITKDisplacementsField -RMSE_TOL = 0.05 -APPLY_NONLINEAR_CMD = { - "itk": """\ -antsApplyTransforms -d 3 -r {reference} -i {moving} \ --o {output} -n NearestNeighbor -t {transform} {extra}\ -""".format, - "afni": """\ -3dNwarpApply -nwarp {transform} -source {moving} \ --master {reference} -interp NN -prefix {output} {extra}\ -""".format, - 'fsl': """\ -applywarp -i {moving} -r {reference} -o {output} {extra}\ --w {transform} --interp=nn""".format, -} - - @pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 3)]) def test_itk_disp_load(size): """Checks field sizes.""" with pytest.raises(TransformFileError): - ITKDisplacementsField.from_image(nb.Nifti1Image(np.zeros(size), np.eye(4), None)) + ITKDisplacementsField.from_image( + nb.Nifti1Image(np.zeros(size), np.eye(4), None) + 
) @pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 2, 3), (20, 20, 20, 1, 4)]) @@ -96,144 +81,18 @@ def test_bsplines_references(testdata_path): ).to_field() with pytest.raises(TransformError): - BSplineFieldTransform( - testdata_path / "someones_bspline_coefficients.nii.gz" - ).apply(testdata_path / "someones_anatomy.nii.gz") + apply( + BSplineFieldTransform( + testdata_path / "someones_bspline_coefficients.nii.gz" + ), + testdata_path / "someones_anatomy.nii.gz", + ) - BSplineFieldTransform( - testdata_path / "someones_bspline_coefficients.nii.gz" - ).apply( + apply( + BSplineFieldTransform(testdata_path / "someones_bspline_coefficients.nii.gz"), testdata_path / "someones_anatomy.nii.gz", - reference=testdata_path / "someones_anatomy.nii.gz" - ) - - -@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"]) -@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) -@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)]) -def test_displacements_field1( - tmp_path, - get_testdata, - get_testmask, - image_orientation, - sw_tool, - axis, -): - """Check a translation-only field on one or more axes, different image orientations.""" - if (image_orientation, sw_tool) == ("oblique", "afni"): - pytest.skip("AFNI obliques are not yet implemented for displacements fields") - - os.chdir(str(tmp_path)) - nii = get_testdata[image_orientation] - msk = get_testmask[image_orientation] - nii.to_filename("reference.nii.gz") - msk.to_filename("mask.nii.gz") - - fieldmap = np.zeros( - (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3), - dtype="float32", - ) - fieldmap[..., axis] = -10.0 - - _hdr = nii.header.copy() - if sw_tool in ("itk",): - _hdr.set_intent("vector") - _hdr.set_data_dtype("float32") - - xfm_fname = "warp.nii.gz" - field = nb.Nifti1Image(fieldmap, nii.affine, _hdr) - field.to_filename(xfm_fname) - - xfm = nlload(xfm_fname, fmt=sw_tool) - - # Then apply the transform and cross-check with software - cmd = 
APPLY_NONLINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=tmp_path / "mask.nii.gz", - moving=tmp_path / "mask.nii.gz", - output=tmp_path / "resampled_brainmask.nii.gz", - extra="--output-data-type uchar" if sw_tool == "itk" else "", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - # resample mask - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - nt_moved_mask = xfm.apply(msk, order=0) - nt_moved_mask.set_data_dtype(msk.get_data_dtype()) - diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL - brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=tmp_path / "reference.nii.gz", - moving=tmp_path / "reference.nii.gz", - output=tmp_path / "resampled.nii.gz", - extra="--output-data-type uchar" if sw_tool == "itk" else "" - ) - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - - nt_moved = xfm.apply(nii, order=0) - nt_moved.set_data_dtype(nii.get_data_dtype()) - nt_moved.to_filename("nt_resampled.nii.gz") - sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + reference=testdata_path / "someones_anatomy.nii.gz", ) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - -@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) -def test_displacements_field2(tmp_path, testdata_path, sw_tool): - """Check a translation-only field on one or more axes, 
different image orientations.""" - os.chdir(str(tmp_path)) - img_fname = testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz" - xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format( - sw_tool - ) - - xfm = nlload(xfm_fname, fmt=sw_tool) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD[sw_tool]( - transform=xfm_fname, - reference=img_fname, - moving=img_fname, - output="resampled.nii.gz", - extra="", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - - nt_moved = xfm.apply(img_fname, order=0) - nt_moved.to_filename("nt_resampled.nii.gz") - sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL def test_bspline(tmp_path, testdata_path): @@ -247,12 +106,16 @@ def test_bspline(tmp_path, testdata_path): bsplxfm = BSplineFieldTransform(bs_name, reference=img_name) dispxfm = DenseFieldTransform(disp_name) - out_disp = dispxfm.apply(img_name) - out_bspl = bsplxfm.apply(img_name) + out_disp = apply(dispxfm, img_name) + out_bspl = apply(bsplxfm, img_name) out_disp.to_filename("resampled_field.nii.gz") out_bspl.to_filename("resampled_bsplines.nii.gz") - assert np.sqrt( - (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32")) ** 2 - ).mean() < 0.2 + assert ( + np.sqrt( + (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32")) + ** 2 + ).mean() + < 0.2 + ) diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py new file mode 100644 index 00000000..2384ad97 --- 
/dev/null +++ b/nitransforms/tests/test_resampling.py @@ -0,0 +1,365 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Exercise the standalone ``apply()`` implementation.""" + +import os +import pytest +import numpy as np +from subprocess import check_call +import shutil + +import nibabel as nb +from nibabel.eulerangles import euler2mat +from nibabel.affines import from_matvec +from nitransforms import linear as nitl +from nitransforms import nonlinear as nitnl +from nitransforms import manip as nitm +from nitransforms import io +from nitransforms.resampling import apply + +RMSE_TOL_LINEAR = 0.09 +RMSE_TOL_NONLINEAR = 0.05 +APPLY_LINEAR_CMD = { + "fsl": """\ +flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \ +-applyxfm -init {transform} -out {resampled}\ +""".format, + "itk": """\ +antsApplyTransforms -d 3 -r {reference} -i {moving} \ +-o {resampled} -n NearestNeighbor -t {transform} --float\ +""".format, + "afni": """\ +3dAllineate -base {reference} -input {moving} \ +-prefix {resampled} -1Dmatrix_apply {transform} -final NN\ +""".format, + "fs": """\ +mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \ +--o {resampled} --nearest""".format, +} +APPLY_NONLINEAR_CMD = { + "itk": """\ +antsApplyTransforms -d 3 -r {reference} -i {moving} \ +-o {output} -n NearestNeighbor -t {transform} {extra}\ +""".format, + "afni": """\ +3dNwarpApply -nwarp {transform} -source {moving} \ +-master {reference} -interp NN -prefix {output} {extra}\ +""".format, + "fsl": """\ +applywarp -i {moving} -r {reference} -o {output} {extra}\ +-w {transform} --interp=nn""".format, +} + + +@pytest.mark.parametrize( + "image_orientation", + [ + "RAS", + "LAS", + "LPS", + "oblique", + ], +) +@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"]) +def test_apply_linear_transform( + tmpdir, get_testdata, get_testmask, image_orientation, sw_tool +): + """Check implementation 
of exporting affines to formats.""" + tmpdir.chdir() + + img = get_testdata[image_orientation] + msk = get_testmask[image_orientation] + + # Generate test transform + T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) + xfm = nitl.Affine(T) + xfm.reference = img + + ext = "" + if sw_tool == "itk": + ext = ".tfm" + elif sw_tool == "fs": + ext = ".lta" + + img.to_filename("img.nii.gz") + msk.to_filename("mask.nii.gz") + + # Write out transform file (software-dependent) + xfm_fname = f"M.{sw_tool}{ext}" + # Change reference dataset for AFNI & oblique + if (sw_tool, image_orientation) == ("afni", "oblique"): + io.afni.AFNILinearTransform.from_ras( + T, + moving=img, + reference=img, + ).to_filename(xfm_fname) + else: + xfm.to_filename(xfm_fname, fmt=sw_tool) + + cmd = APPLY_LINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=os.path.abspath("mask.nii.gz"), + moving=os.path.abspath("mask.nii.gz"), + resampled=os.path.abspath("resampled_brainmask.nii.gz"), + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + # resample mask + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved_mask = nb.load("resampled_brainmask.nii.gz") + + nt_moved_mask = apply(xfm, msk, order=0) + nt_moved_mask.set_data_dtype(msk.get_data_dtype()) + nt_moved_mask.to_filename("ntmask.nii.gz") + diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) + + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR + brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) + + cmd = APPLY_LINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=os.path.abspath("img.nii.gz"), + moving=os.path.abspath("img.nii.gz"), + resampled=os.path.abspath("resampled.nii.gz"), + ) + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + 
sw_moved.set_data_dtype(img.get_data_dtype()) + + nt_moved = apply(xfm, img, order=0) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + nt_moved = apply(xfm, "img.nii.gz", order=0) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"]) +@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) +@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)]) +def test_displacements_field1( + tmp_path, + get_testdata, + get_testmask, + image_orientation, + sw_tool, + axis, +): + """Check a translation-only field on one or more axes, different image orientations.""" + if (image_orientation, sw_tool) == ("oblique", "afni"): + pytest.skip("AFNI obliques are not yet implemented for displacements fields") + + os.chdir(str(tmp_path)) + nii = get_testdata[image_orientation] + msk = get_testmask[image_orientation] + nii.to_filename("reference.nii.gz") + msk.to_filename("mask.nii.gz") + + fieldmap = np.zeros( + (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3), + dtype="float32", + ) + fieldmap[..., axis] = -10.0 + + _hdr = nii.header.copy() + if sw_tool in ("itk",): + _hdr.set_intent("vector") + _hdr.set_data_dtype("float32") + + xfm_fname = "warp.nii.gz" + field = nb.Nifti1Image(fieldmap, nii.affine, _hdr) + field.to_filename(xfm_fname) + + xfm = nitnl.load(xfm_fname, fmt=sw_tool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + 
transform=os.path.abspath(xfm_fname), + reference=tmp_path / "mask.nii.gz", + moving=tmp_path / "mask.nii.gz", + output=tmp_path / "resampled_brainmask.nii.gz", + extra="--output-data-type uchar" if sw_tool == "itk" else "", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + # resample mask + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved_mask = nb.load("resampled_brainmask.nii.gz") + nt_moved_mask = apply(xfm, msk, order=0) + nt_moved_mask.set_data_dtype(msk.get_data_dtype()) + diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) + + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR + brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=tmp_path / "reference.nii.gz", + moving=tmp_path / "reference.nii.gz", + output=tmp_path / "resampled.nii.gz", + extra="--output-data-type uchar" if sw_tool == "itk" else "", + ) + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = apply(xfm, nii, order=0) + nt_moved.set_data_dtype(nii.get_data_dtype()) + nt_moved.to_filename("nt_resampled.nii.gz") + sw_moved.set_data_dtype(nt_moved.get_data_dtype()) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) +def test_displacements_field2(tmp_path, testdata_path, sw_tool): + """Check a translation-only field on one or more axes, different image orientations.""" + os.chdir(str(tmp_path)) + img_fname = 
testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz" + xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format( + sw_tool + ) + + xfm = nitnl.load(xfm_fname, fmt=sw_tool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + transform=xfm_fname, + reference=img_fname, + moving=img_fname, + output="resampled.nii.gz", + extra="", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = apply(xfm, img_fname, order=0) + nt_moved.to_filename("nt_resampled.nii.gz") + sw_moved.set_data_dtype(nt_moved.get_data_dtype()) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR + + +def test_apply_transformchain(tmp_path, testdata_path): + """Check a translation-only field on one or more axes, different image orientations.""" + os.chdir(str(tmp_path)) + img_fname = testdata_path / "T1w_scanner.nii.gz" + xfm_fname = ( + testdata_path + / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5" + ) + + xfm = nitm.load(xfm_fname) + + assert len(xfm) == 2 + + ref_fname = tmp_path / "reference.nii.gz" + nb.Nifti1Image( + np.zeros(xfm.reference.shape, dtype="uint16"), + xfm.reference.affine, + ).to_filename(str(ref_fname)) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD["itk"]( + transform=xfm_fname, + reference=ref_fname, + moving=img_fname, + output="resampled.nii.gz", + extra="", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not 
found on host") + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = apply(xfm, img_fname, order=0) + nt_moved.to_filename("nt_resampled.nii.gz") + diff = sw_moved.get_fdata() - nt_moved.get_fdata() + # A certain tolerance is necessary because of resampling at borders + assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("serialize_4d", [True, False]) +def test_LinearTransformsMapping_apply( + tmp_path, data_path, testdata_path, serialize_4d +): + """Apply transform mappings.""" + hmc = nitl.load( + data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz" + ) + assert isinstance(hmc, nitl.LinearTransformsMapping) + + # Test-case: realign functional data on to sbref + nii = apply( + hmc, + testdata_path / "func.nii.gz", + order=1, + reference=testdata_path / "sbref.nii.gz", + serialize_nvols=2 if serialize_4d else np.inf, + ) + assert nii.dataobj.shape[-1] == len(hmc) + + # Test-case: write out a fieldmap moved with head + hmcinv = nitl.LinearTransformsMapping( + np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" + ) + + nii = apply( + hmcinv, + testdata_path / "fmap.nii.gz", + order=1, + serialize_nvols=2 if serialize_4d else np.inf, + ) + assert nii.dataobj.shape[-1] == len(hmc) + + # Ensure a ValueError is issued when trying to apply mismatched transforms + # (e.g., in this case, two transforms while the functional has 8 volumes) + hmc = nitl.LinearTransformsMapping(hmc.matrix[:2, ...]) + with pytest.raises(ValueError): + apply( + hmc, + testdata_path / "func.nii.gz", + order=1, + reference=testdata_path / "sbref.nii.gz", + serialize_nvols=2 if serialize_4d else np.inf, + ) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py new file mode 100644 index 00000000..a210583e --- /dev/null +++ b/nitransforms/tests/test_surface.py @@ -0,0 +1,241 @@ +import tempfile + +import numpy as np 
+import nibabel as nb +import pytest +from scipy import sparse +from nitransforms.base import SurfaceMesh +from nitransforms.surface import ( + SurfaceTransformBase, + SurfaceCoordinateTransform, + SurfaceResampler +) + +# def test_surface_transform_npz(): +# mat = sparse.random(10, 10, density=0.5) +# xfm = SurfaceCoordinateTransform(mat) +# fn = tempfile.mktemp(suffix=".npz") +# print(fn) +# xfm.to_filename(fn) +# +# xfm2 = SurfaceCoordinateTransform.from_filename(fn) +# try: +# assert xfm.mat.shape == xfm2.mat.shape +# np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) +# np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) +# np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) +# except Exception: +# os.remove(fn) +# raise +# os.remove(fn) + + +# def test_surface_transform_normalization(): +# mat = np.random.uniform(size=(20, 10)) +# xfm = SurfaceCoordinateTransform(mat) +# x = np.random.uniform(size=(5, 20)) +# y_element = xfm.apply(x, normalize="element") +# np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1)) +# y_sum = xfm.apply(x, normalize="sum") +# np.testing.assert_allclose(y_sum.sum(axis=1), x.sum(axis=1)) +# y_none = xfm.apply(x, normalize="none") +# assert y_none.sum() != y_element.sum() +# assert y_none.sum() != y_sum.sum() + +def test_SurfaceTransformBase(testdata_path): + # note these transformations are a bit of a weird use of surface transformation, but I'm + # just testing the base class and the io + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + pial = SurfaceMesh(nb.load(pial_path)) + stfb = SurfaceTransformBase(sphere_reg, pial) + + # test loading from filenames + stfb_ff = SurfaceTransformBase.from_filename(sphere_reg_path, pial_path) + assert stfb_ff == stfb + + # test 
inversion and setting + stfb_i = ~stfb + stfb.reference = pial + stfb.moving = sphere_reg + assert np.all(stfb_i._reference._coords == stfb._reference._coords) + assert np.all(stfb_i._reference._triangles == stfb._reference._triangles) + assert np.all(stfb_i._moving._coords == stfb._moving._coords) + assert np.all(stfb_i._moving._triangles == stfb._moving._triangles) + # test equality + assert stfb_i == stfb + + +def test_SurfaceCoordinateTransform(testdata_path): + # note these transformations are a bit of a weird use of surface transformation, but I'm + # just testing the class and the io + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + pial = SurfaceMesh(nb.load(pial_path)) + fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) + + # test mesh correspondence test + with pytest.raises(ValueError): + sct = SurfaceCoordinateTransform(fslr_sphere, pial) + + # test loading from filenames + sct = SurfaceCoordinateTransform(pial, sphere_reg) + sctf = SurfaceCoordinateTransform.from_filename(reference_path=pial_path, + moving_path=sphere_reg_path) + assert sct == sctf + + # test mapping + assert np.all(sct.map(sct.moving._coords[:100], inverse=True) == sct.reference._coords[:100]) + assert np.all(sct.map(sct.reference._coords[:100]) == sct.moving._coords[:100]) + with pytest.raises(NotImplementedError): + sct.map(sct.moving._coords[0]) + + # test inversion and addition + scti = ~sct + + assert sct + scti == SurfaceCoordinateTransform(pial, pial) + assert scti + sct == SurfaceCoordinateTransform(sphere_reg, sphere_reg) + + sct.reference = sphere_reg + sct.moving = pial + assert np.all(scti.reference._coords == sct.reference._coords) + assert 
np.all(scti.reference._triangles == sct.reference._triangles) + assert scti == sct + + +def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + + sct = SurfaceCoordinateTransform(pial_path, sphere_reg_path) + fn = tempfile.mktemp(suffix=".h5") + sct.to_filename(fn) + sct2 = SurfaceCoordinateTransform.from_filename(fn) + assert sct == sct2 + + +def test_ProjectUnproject(testdata_path): + + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + subj_fsaverage_sphere_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" + ) + fslr_fsaverage_sphere_path = ( + testdata_path + / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" + ) + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + + # test project-unproject functionality + projunproj = SurfaceResampler(sphere_reg_path, fslr_sphere_path) + with pytest.raises(ValueError): + projunproj.apply(pial_path) + transformed = projunproj.apply(fslr_fsaverage_sphere_path) + projunproj_ref = nb.load(subj_fsaverage_sphere_path) + assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 + assert np.all(transformed._triangles == projunproj_ref.agg_data()[1]) + + +def test_SurfaceResampler(testdata_path, tmpdir): + dif_tol = 0.001 + fslr_sphere_path = ( + testdata_path + / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + ) + shape_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + ) + ref_resampled_thickness_path = ( + testdata_path + / 
"sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" + ) + pial_path = ( + testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + ) + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) + + fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + subj_thickness = nb.load(shape_path) + + with pytest.raises(ValueError): + SurfaceResampler(sphere_reg_path, pial_path) + with pytest.raises(ValueError): + SurfaceResampler(pial_path, sphere_reg_path) + + reference = fslr_sphere + moving = sphere_reg + # compare results to what connectome workbench produces + resampling = SurfaceResampler(reference, moving) + resampled_thickness = resampling.apply(subj_thickness.agg_data(), normalize='element') + ref_resampled = nb.load(ref_resampled_thickness_path).agg_data() + + max_dif = np.abs(resampled_thickness.astype(np.float32) - ref_resampled).max() + assert max_dif < dif_tol + + with pytest.raises(ValueError): + SurfaceResampler(reference, moving, mat=resampling.mat[:, :10000]) + with pytest.raises(ValueError): + SurfaceResampler(reference, moving, mat=resampling.mat[:10000, :]) + with pytest.raises(ValueError): + resampling.reference = reference + with pytest.raises(ValueError): + resampling.moving = moving + with pytest.raises(NotImplementedError): + _ = SurfaceResampler(reference, moving, "foo") + + # test file io + fn = tempfile.mktemp(suffix=".h5") + resampling.to_filename(fn) + resampling2 = SurfaceResampler.from_filename(fn) + + # assert resampling2 == resampling + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) + assert np.all(resampling2.reference._triangles == resampling.reference._triangles) + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) + assert np.all(resampling2.moving._triangles == resampling.moving._triangles) + + 
resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness2 == resampled_thickness) + + # test loading with a csr + assert isinstance(resampling.mat, sparse.csr_array) + resampling2a = SurfaceResampler(reference, moving, mat=resampling.mat) + resampled_thickness2a = resampling2a.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness2a == resampled_thickness) + + with pytest.raises(ValueError): + _ = SurfaceResampler(moving, reference, mat=resampling.mat) + + # test map + assert np.all(resampling.map(np.array([[0, 0, 0]])) == np.array([[0, 0, 0]])) + + # test loading from surfaces + resampling3 = SurfaceResampler.from_filename(reference_path=fslr_sphere_path, + moving_path=sphere_reg_path) + assert resampling3 == resampling + resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness3 == resampled_thickness) diff --git a/nitransforms/tests/test_version.py b/nitransforms/tests/test_version.py index a0723e9a..48a70ecf 100644 --- a/nitransforms/tests/test_version.py +++ b/nitransforms/tests/test_version.py @@ -1,7 +1,5 @@ """Test _version.py.""" import sys -from collections import namedtuple -from pkg_resources import DistributionNotFound from importlib import reload import nitransforms @@ -17,26 +15,9 @@ class _version: assert nitransforms.__version__ == "10.0.0" -def test_version_scm1(monkeypatch): - """Retrieve the version via pkg_resources.""" - monkeypatch.setitem(sys.modules, "nitransforms._version", None) - - def _dist(name): - Distribution = namedtuple("Distribution", ["name", "version"]) - return Distribution(name, "success") - - monkeypatch.setattr("pkg_resources.get_distribution", _dist) - reload(nitransforms) - assert nitransforms.__version__ == "success" - - -def test_version_scm2(monkeypatch): +def test_version_fallback(monkeypatch): """Check version could not be interpolated.""" 
monkeypatch.setitem(sys.modules, "nitransforms._version", None) - def _raise(name): - raise DistributionNotFound("No get_distribution mock") - - monkeypatch.setattr("pkg_resources.get_distribution", _raise) reload(nitransforms) - assert nitransforms.__version__ == "unknown" + assert nitransforms.__version__ == "0+unknown" diff --git a/pyproject.toml b/pyproject.toml index 45a691bb..63876722 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,6 +2,60 @@ requires = ["setuptools >= 45", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" +[project] +name = "nitransforms" +authors = [{name = "The NiPy developers", email = "nipreps@gmail.com"}] +readme = "README.md" +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering :: Image Recognition", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +description = "NiTransforms -- Neuroimaging spatial transforms in Python." 
+license = {text = "MIT License"} +requires-python = ">= 3.9" +dependencies = [ + "numpy >= 1.21", + "scipy >= 1.8", + "nibabel >= 4.0", + "h5py >= 3.9", +] +dynamic = ["version"] + +[project.urls] +Homepage = "https://github.com/nipy/nitransforms" +Manuscript = "https://doi.org/10.31219/osf.io/8aq7b" +NiBabel = "https://github.com/nipy/nibabel/pull/656" + +[project.optional-dependencies] +niftiext = ["lxml >= 4.6"] +test = [ + "pytest >= 6", + "pytest-cov >= 2.11", + "pytest-env", + "pytest-xdist >= 2.5", + "coverage[toml] >= 5.2.1", + "nitransforms[niftiext]", +] +# Aliases +niftiexts = ["nitransforms[niftiext]"] +tests = ["nitransforms[test]"] +all = [ + "nitransforms[test]", +] + +[project.scripts] +nb-transform = "nitransforms.cli:main" + +[tool.setuptools.packages.find] +include = ["nitransforms*"] + [tool.setuptools_scm] write_to = "nitransforms/_version.py" write_to_template = """\ @@ -9,3 +63,24 @@ write_to_template = """\ __version__ = "{version}" """ fallback_version = "0.0" + +[tool.pytest.ini_options] +minversion = "6" +testpaths = ["nitransforms"] +log_cli_level = "INFO" +xfail_strict = true +norecursedirs = [".git"] +addopts = [ + "-svx", + "-ra", + "--strict-config", + "--strict-markers", + "--doctest-modules", + "--cov=nitransforms", + "--cov-report=xml", + "--cov-config=pyproject.toml", +] +doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" +env = "PYTHONHASHSEED=0" +filterwarnings = ["ignore::DeprecationWarning"] +junit_family = "xunit2" diff --git a/setup.cfg b/setup.cfg index 93499c35..f355be94 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,63 +1,3 @@ -[metadata] -author = The NiPy developers -author_email = nipreps@gmail.com -classifiers = - Development Status :: 2 - Pre-Alpha - Intended Audience :: Science/Research - Topic :: Scientific/Engineering :: Image Recognition - License :: OSI Approved :: BSD License - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 
- Programming Language :: Python :: 3.11 -description = NiTransforms -- Neuroimaging spatial transforms in Python. -license = MIT License -long_description = file:README.md -long_description_content_type = text/markdown; charset=UTF-8 -provides = nitransforms -project_urls = - Manuscript=https://doi.org/10.31219/osf.io/8aq7b - NiBabel=https://github.com/nipy/nibabel/pull/656 -url = https://github.com/nipy/nitransforms - -[options] -python_requires = >= 3.8 -install_requires = - numpy >= 1.21.0 - scipy >= 1.6.0 - nibabel >= 3.0 - h5py -test_requires = - pytest - pytest-cov - nose - codecov -setup_requires = - setuptools_scm - toml -packages = find: -include_package_data = True - -[options.extras_require] -niftiext = - lxml -niftiexts = - %(niftiext)s -test = - pytest - pytest-cov - codecov -tests = - %(test)s - -all = - %(niftiext)s - %(test)s - -[options.entry_points] -console_scripts = - nb-transform = nitransforms.cli:main - [flake8] max-line-length = 99 doctests = False @@ -65,13 +5,3 @@ ignore = E266 E231 W503 - -[tool:pytest] -norecursedirs = .git -addopts = -svx --doctest-modules -doctest_optionflags = ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS -env = - PYTHONHASHSEED=0 -filterwarnings = - ignore::DeprecationWarning -junit_family=xunit2 diff --git a/setup.py b/setup.py deleted file mode 100644 index 78ec8923..00000000 --- a/setup.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Prepare package for distribution.""" -from setuptools import setup - -if __name__ == "__main__": - setup( - name="nitransforms", - ) diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..fe549039 --- /dev/null +++ b/tox.ini @@ -0,0 +1,110 @@ +[tox] +requires = + tox>=4 +envlist = + py3{9,10,11,12,13}-latest + py39-min + py3{11,12,13}-pre +skip_missing_interpreters = true + +# Configuration that allows us to split tests across GitHub runners effectively +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + 3.12: py312 + 3.13: py313 + +[gh-actions:env] +DEPENDS = + min: 
min + latest: latest + pre: pre + +[testenv] +description = Pytest with coverage +labels = test +pip_pre = + pre: true +pass_env = + # Overrides default location for finding test data + TEST_DATA_HOME + # getpass.getuser() sources for Windows: + LOGNAME + USER + LNAME + USERNAME + # Pass user color preferences through + PY_COLORS + FORCE_COLOR + NO_COLOR + CLICOLOR + CLICOLOR_FORCE + PYTHON_GIL +extras = tests +setenv = + FSLOUTPUTTYPE=NIFTI_GZ + pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + pre: UV_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple +uv_resolution = + min: lowest-direct + +commands = + python -c "import nitransforms; print(nitransforms.__version__)" + pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto} + +[testenv:style] +description = Check our style guide +labels = check +deps = + flake8 +skip_install = true +commands = + flake8 nitransforms + +[testenv:style-fix] +description = Auto-apply style guide to the extent possible +labels = pre-release +deps = + ruff +skip_install = true +commands = + ruff check --fix + ruff format + ruff check --select ISC001 + +[testenv:spellcheck] +description = Check spelling +labels = check +deps = + codespell[toml] +skip_install = true +commands = + codespell . 
{posargs} + +[testenv:build{,-strict}] +labels = + check + pre-release +deps = + build + twine +skip_install = true +set_env = + # Ignore specific known warnings: + # https://github.com/pypa/pip/issues/11684 + # https://github.com/pypa/pip/issues/12243 + strict: PYTHONWARNINGS=error,once:pkg_resources is deprecated as an API.:DeprecationWarning:pip._internal.metadata.importlib._envs,once:Unimplemented abstract methods {'locate_file'}:DeprecationWarning:pip._internal.metadata.importlib._dists +commands = + python -m build + python -m twine check dist/* + +[testenv:publish] +depends = build +labels = release +deps = + twine +skip_install = true +commands = + python -m twine upload dist/*