diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9f216d61..47b0e00e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -2,7 +2,7 @@ version: 2
jobs:
build_pytest:
machine:
- image: ubuntu-2004:202107-02
+ image: default
working_directory: /tmp/src/nitransforms
environment:
TZ: "/usr/share/zoneinfo/America/Los_Angeles"
@@ -12,9 +12,9 @@ jobs:
- checkout
- restore_cache:
keys:
- - env-v3-{{ .Branch }}-
- - env-v3-master-
- - env-v3-
+ - env-v6-{{ .Branch }}-
+ - env-v6-master-
+ - env-v6-
- run:
name: Setup git-annex
command: |
@@ -29,17 +29,14 @@ jobs:
- run:
name: Setup DataLad
command: |
- export PY3=$(pyenv versions | grep '3\.' |
- sed -e 's/.* 3\./3./' -e 's/ .*//')
- pyenv local $PY3
- python -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 3.4"
- python -m pip install --no-cache-dir -U datalad datalad-osf
+ python3 -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 6.2"
+ python3 -m pip install --no-cache-dir -U datalad datalad-osf
- save_cache:
- key: env-v3-{{ .Branch }}-{{ .BuildNum }}
+ key: env-v6-{{ .Branch }}-{{ .BuildNum }}
paths:
- /opt/circleci/git-annex.linux
- - /opt/circleci/.pyenv/versions/3.9.4
+ - /opt/circleci/.pyenv/versions
- restore_cache:
keys:
@@ -49,10 +46,9 @@ jobs:
- run:
name: Install test data from GIN
command: |
- export PY3=$(pyenv versions | grep '3\.' |
- sed -e 's/.* 3\./3./' -e 's/ .*//')
- pyenv local $PY3
export PATH=/opt/circleci/git-annex.linux:$PATH
+ pyenv local 3
+ eval "$(pyenv init --path)"
mkdir -p /tmp/data
cd /tmp/data
datalad install -r https://gin.g-node.org/oesteban/nitransforms-tests
@@ -98,15 +94,12 @@ jobs:
name: Build Docker image & push to registry
no_output_timeout: 60m
command: |
- export PY3=$(pyenv versions | grep '3\.' |
- sed -e 's/.* 3\./3./' -e 's/ .*//')
- pyenv local $PY3
e=1 && for i in {1..5}; do
docker build --rm --cache-from=nitransforms:latest \
-t nitransforms:latest \
--build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \
--build-arg VCS_REF=`git rev-parse --short HEAD` \
- --build-arg VERSION=$( python3 setup.py --version ) . \
+ --build-arg VERSION=$( python3 -m setuptools_scm ) . \
&& e=0 && break || sleep 15
done && [ "$e" -eq "0" ]
docker tag nitransforms:latest localhost:5000/nitransforms
@@ -123,10 +116,7 @@ jobs:
- run:
name: Check version packaged in Docker image
command: |
- export PY3=$(pyenv versions | grep '3\.' |
- sed -e 's/.* 3\./3./' -e 's/ .*//')
- pyenv local $PY3
- THISVERSION=${CIRCLE_TAG:-$(python3 setup.py --version)}
+ THISVERSION=${CIRCLE_TAG:-$(python3 -m setuptools_scm)}
INSTALLED_VERSION=$(\
docker run -it --rm --entrypoint=python nitransforms \
-c 'import nitransforms as nit; print(nit.__version__, end="")' )
@@ -141,13 +131,14 @@ jobs:
echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IGxpY2Vuc2UudHh0Cg==" | base64 -d | sh
- run:
name: Get codecov
- command: python -m pip install codecov
+ command: python3 -m pip install codecov
- run:
name: Run unit tests
no_output_timeout: 2h
command: |
mkdir -p /tmp/tests/{artifacts,summaries}
- docker run -u $( id -u ) -it --rm -w /src/nitransforms \
+ docker run -u $( id -u ) -it --rm \
+ -w /src/nitransforms -v $PWD:/src/nitransforms \
-v /tmp/data/nitransforms-tests:/data -e TEST_DATA_HOME=/data \
-e COVERAGE_FILE=/tmp/summaries/.pytest.coverage \
-v /tmp/fslicense/license.txt:/opt/freesurfer/license.txt:ro \
@@ -159,7 +150,7 @@ jobs:
name: Submit unit test coverage
command: |
cd /tmp/src/nitransforms
- python -m codecov --file /tmp/tests/summaries/unittests.xml \
+ python3 -m codecov --file /tmp/tests/summaries/unittests.xml \
--flags unittests -e CIRCLE_JOB
- run:
name: Clean up tests directory
@@ -186,9 +177,9 @@ jobs:
command: |
python3 -m venv /tmp/buildenv
source /tmp/buildenv/bin/activate
- python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 3.4" \
+ python3 -m pip install "setuptools >= 45.0" build wheel "setuptools_scm[toml] >= 6.2" \
"pip>=10.0.1" twine docutils
- python setup.py sdist bdist_wheel
+ python3 -m build
twine check dist/nitransforms*
- store_artifacts:
path: /tmp/src/nitransforms/dist
@@ -200,9 +191,9 @@ jobs:
command: |
python3 -m venv /tmp/install_sdist
source /tmp/install_sdist/bin/activate
- python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1"
+ python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2"
- THISVERSION=$( python3 setup.py --version )
+ THISVERSION=$( python3 -m setuptools_scm )
THISVERSION=${CIRCLE_TAG:-$THISVERSION}
python3 -m pip install dist/nitransforms*.tar.gz
INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")')
@@ -214,9 +205,9 @@ jobs:
command: |
python3 -m venv /tmp/install_wheel
source /tmp/install_wheel/bin/activate
- python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1"
+ python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2"
- THISVERSION=$( python3 setup.py --version )
+ THISVERSION=$( python3 -m setuptools_scm )
THISVERSION=${CIRCLE_TAG:-$THISVERSION}
python3 -m pip install dist/nitransforms*.whl
INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")')
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 8fd9829b..71a1494b 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -16,29 +16,30 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: [3.7, 3.8, 3.9]
+ python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
steps:
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 0
- - name: Build in confined, updated environment and interpolate version
+ - name: Build package
run: |
- python -m venv /tmp/buildenv
- source /tmp/buildenv/bin/activate
- python -m pip install -U setuptools pip wheel twine docutils
- python setup.py sdist bdist_wheel
- python -m twine check dist/nitransforms*
+ pipx run build
+ - name: Determine expected version
+ run: |
+ python -m venv /tmp/getversion
+ source /tmp/getversion/bin/activate
+ python -m pip install setuptools_scm
# Interpolate version
if [[ "$GITHUB_REF" == refs/tags/* ]]; then
TAG=${GITHUB_REF##*/}
fi
- THISVERSION=$( python setup.py --version )
+ THISVERSION=$( python -m setuptools_scm )
THISVERSION=${TAG:-$THISVERSION}
echo "Expected VERSION: \"${THISVERSION}\""
echo "THISVERSION=${THISVERSION}" >> ${GITHUB_ENV}
@@ -47,7 +48,7 @@ jobs:
run: |
python -m venv /tmp/install_sdist
source /tmp/install_sdist/bin/activate
- python -m pip install --upgrade pip wheel
+ python -m pip install --upgrade pip
python -m pip install dist/nitransforms*.tar.gz
INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
echo "VERSION: \"${THISVERSION}\""
@@ -58,7 +59,7 @@ jobs:
run: |
python -m venv /tmp/install_wheel
source /tmp/install_wheel/bin/activate
- python -m pip install --upgrade pip wheel
+ python -m pip install --upgrade pip
python -m pip install dist/nitransforms*.whl
INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")')
echo "INSTALLED: \"${INSTALLED_VERSION}\""
@@ -89,10 +90,7 @@ jobs:
if: "!contains(github.event.head_commit.message, '[skip ci]')"
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
- - name: Set up Python 3.7
- uses: actions/setup-python@v1
- with:
- python-version: 3.7
- - run: pip install flake8
- - run: flake8 nitransforms/
+ - uses: actions/checkout@v3
+ - name: Set up Python 3
+ uses: actions/setup-python@v4
+ - run: pipx run flake8 nitransforms
diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml
index 19c6a2a1..7efe2c74 100644
--- a/.github/workflows/travis.yml
+++ b/.github/workflows/travis.yml
@@ -11,7 +11,7 @@ jobs:
strategy:
max-parallel: 5
matrix:
- python-version: [3.7, 3.8, 3.9]
+ python-version: ['3.8', '3.9', '3.10', '3.11']
steps:
- name: Git settings (pacify DataLad)
@@ -19,10 +19,10 @@ jobs:
git config --global user.name 'NiPreps Bot'
git config --global user.email 'nipreps@gmail.com'
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - uses: actions/cache@v2
+ - uses: actions/cache@v3
id: conda
with:
path: |
@@ -33,10 +33,10 @@ jobs:
python-${{ matrix.python-version }}-
- name: Install DataLad
run: |
- $CONDA/bin/conda install -c conda-forge git-annex datalad pip codecov pytest
+ $CONDA/bin/conda install -c conda-forge git-annex datalad pip pytest
$CONDA/bin/python -m pip install datalad-osf
- - uses: actions/cache@v2
+ - uses: actions/cache@v3
with:
path: ${{ env.TEST_DATA_HOME }}
key: data-cache-v2
@@ -53,7 +53,7 @@ jobs:
$CONDA/bin/datalad update --merge -d nitransforms-tests/
$CONDA/bin/datalad get -d nitransforms-tests/
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Install minimal dependencies
run: |
$CONDA/bin/pip install .[tests]
@@ -62,5 +62,6 @@ jobs:
$CONDA/bin/pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/
- name: Submit code coverage
- run: |
- $CONDA/bin/python -m codecov --flags travis --file cov.xml -e $GITHUB_RUN_NUMBER
+ uses: codecov/codecov-action@v3
+ with:
+ files: cov.xml
diff --git a/.gitignore b/.gitignore
index 8681c41b..1494e83a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -82,3 +82,4 @@ local_settings.py
*.swp
.vscode/
+.DS_Store
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000..34413ca6
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,19 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+version: 2
+
+build:
+ os: ubuntu-20.04
+ tools:
+ python: "3.8"
+
+sphinx:
+ configuration: docs/conf.py
+
+python:
+ install:
+ - path: .
+ extra_requirements:
+ - all
+ - requirements: docs/requirements.txt
diff --git a/CHANGES.rst b/CHANGES.rst
index f118e261..b9e933aa 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,54 @@
+24.0.0 (August 18, 2024)
+========================
+A new series incorporating several major changes, including bugfixes and taking on several
+housekeeping/maintenance actions.
+One relevant change is the move of the ``apply()`` member out of the
+transformation data structures by @jmarabotto.
+The method ``apply()`` is now a standalone method that operates on one transform
+and images/surfaces/etc. provided as arguments.
+A later major development is the introduction of foundational support for surface transforms by @feilong
+and @Shotgunosine.
+
+New Contributors
+----------------
+
+* @mvdoc made their first contribution in https://github.com/nipy/nitransforms/pull/194
+* @jmarabotto made their first contribution in https://github.com/nipy/nitransforms/pull/197
+* @bpinsard made their first contribution in https://github.com/nipy/nitransforms/pull/182
+* @jbanusco made their first contribution in https://github.com/nipy/nitransforms/pull/188
+* @feilong made their first contribution in https://github.com/nipy/nitransforms/pull/203
+
+CHANGES
+-------
+
+* FIX: Inefficient iterative reloading of reference and moving images by @oesteban in https://github.com/nipy/nitransforms/pull/186
+* FIX: Postpone coordinate mapping on linear array transforms by @oesteban in https://github.com/nipy/nitransforms/pull/187
+* FIX: Remove unsafe cast during ``TransformBase.apply()`` by @effigies in https://github.com/nipy/nitransforms/pull/189
+* FIX: ``_is_oblique()`` by @mvdoc in https://github.com/nipy/nitransforms/pull/194
+* FIX: Update implementation of ``ndim`` property of transforms by @jmarabotto in https://github.com/nipy/nitransforms/pull/197
+* FIX: Output displacement fields by @bpinsard in https://github.com/nipy/nitransforms/pull/182
+* FIX: Composition of deformation fields by @jbanusco in https://github.com/nipy/nitransforms/pull/188
+* FIX: Indexing disallowed in lists introduced by bugfix by @oesteban in https://github.com/nipy/nitransforms/pull/204
+* FIX: Do not transpose (see :obj:`~scipy.ndimage.map_coordinates`) by @oesteban in https://github.com/nipy/nitransforms/pull/207
+* FIX: Forgotten test using ``xfm.apply()`` by @oesteban in https://github.com/nipy/nitransforms/pull/208
+* FIX: Load ITK fields from H5 correctly by @effigies in https://github.com/nipy/nitransforms/pull/211
+* FIX: Wrong warning argument name ``level`` in ``warnings.warn`` by @oesteban in https://github.com/nipy/nitransforms/pull/216
+* ENH: Define ``ndim`` property on nonlinear transforms by @oesteban in https://github.com/nipy/nitransforms/pull/201
+* ENH: Outsource ``apply()`` from transform objects by @jmarabotto in https://github.com/nipy/nitransforms/pull/195
+* ENH: Restore ``apply()`` method, warning of deprecation and calling function by @effigies in https://github.com/nipy/nitransforms/pull/209
+* ENH: ``SurfaceTransform`` class by @feilong in https://github.com/nipy/nitransforms/pull/203
+* ENH: reenable-parallelization-apply-214 (builds on PR #215, solves Issue #214) by @jmarabotto in https://github.com/nipy/nitransforms/pull/217
+* ENH: Parallelize serialized 3D+t transforms by @oesteban in https://github.com/nipy/nitransforms/pull/220
+* ENH: Implement a memory limitation mechanism in loading data by @oesteban in https://github.com/nipy/nitransforms/pull/221
+* ENH: Serialize+parallelize 4D ``apply()`` into 3D+t and add 'low memory' loading by @oesteban in https://github.com/nipy/nitransforms/pull/215
+* MAINT: Loosen dependencies by @mgxd in https://github.com/nipy/nitransforms/pull/164
+* MAINT: Drop Python 3.7 support, test through 3.11 by @effigies in https://github.com/nipy/nitransforms/pull/181
+* MAINT: Update CircleCI's infrastructure (machine image and Python version in Docker image) by @oesteban in https://github.com/nipy/nitransforms/pull/206
+* MAINT: Fix tests for Python 3.12, numpy 2.0, and pytest-xdist by @effigies in https://github.com/nipy/nitransforms/pull/210
+* MAINT: Update ANTs' pinnings by @oesteban in https://github.com/nipy/nitransforms/pull/219
+
+**Full Changelog**: https://github.com/nipy/nitransforms/compare/23.0.1...24.0.0
+
23.0.1 (July 10, 2023)
======================
Hotfix release addressing two issues.
diff --git a/Dockerfile b/Dockerfile
index ae270b45..a1c5f4b2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,48 +1,150 @@
-FROM ubuntu:xenial-20200114
-
-# Pre-cache neurodebian key
-COPY docker/files/neurodebian.gpg /usr/local/etc/neurodebian.gpg
-
-# Prepare environment
+# Ubuntu 22.04 LTS - Jammy
+ARG BASE_IMAGE=ubuntu:jammy-20240125
+
+#
+# Build wheel
+#
+FROM python:slim AS src
+RUN pip install build
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends git
+COPY . /src
+RUN python -m build /src
+
+#
+# Download stages
+#
+
+# Utilities for downloading packages
+FROM ${BASE_IMAGE} as downloader
+# Bump the date to current to refresh curl/certificates/etc
+RUN echo "2023.07.20"
RUN apt-get update && \
apt-get install -y --no-install-recommends \
- curl \
+ binutils \
bzip2 \
ca-certificates \
- xvfb \
- build-essential \
- autoconf \
- libtool \
- pkg-config \
- git && \
- curl -sL https://deb.nodesource.com/setup_10.x | bash - && \
+ curl \
+ unzip && \
+ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+RUN update-ca-certificates -f
+
+# FreeSurfer 7.3.2
+FROM downloader as freesurfer
+COPY docker/files/freesurfer7.3.2-exclude.txt /usr/local/etc/freesurfer7.3.2-exclude.txt
+COPY docker/files/fs-cert.pem /usr/local/etc/fs-cert.pem
+RUN curl --cacert /usr/local/etc/fs-cert.pem \
+ -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-ubuntu22_amd64-7.3.2.tar.gz \
+ | tar zxv --no-same-owner -C /opt --exclude-from=/usr/local/etc/freesurfer7.3.2-exclude.txt
+
+# AFNI
+FROM downloader as afni
+# Bump the date to current to update AFNI
+RUN echo "2023.07.20"
+RUN mkdir -p /opt/afni-latest \
+ && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \
+ | tar -xz -C /opt/afni-latest --strip-components 1 \
+ --exclude "linux_openmp_64/*.gz" \
+ --exclude "linux_openmp_64/funstuff" \
+ --exclude "linux_openmp_64/shiny" \
+ --exclude "linux_openmp_64/afnipy" \
+ --exclude "linux_openmp_64/lib/RetroTS" \
+ --exclude "linux_openmp_64/lib_RetroTS" \
+ --exclude "linux_openmp_64/meica.libs" \
+ # Keep only what we use
+ && find /opt/afni-latest -type f -not \( \
+ -name "3dTshift" -or \
+ -name "3dUnifize" -or \
+ -name "3dAutomask" -or \
+ -name "3dvolreg" -or \
+ -name "3dNwarpApply" \
+ \) -delete
+
+# Micromamba
+FROM downloader as micromamba
+
+# Install a C compiler to build extensions when needed.
+# traits<6.4 wheels are not available for Python 3.11+, but build easily.
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends build-essential && \
+ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+WORKDIR /
+# Bump the date to current to force update micromamba
+RUN echo "2024.02.06"
+RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+
+ENV MAMBA_ROOT_PREFIX="/opt/conda"
+COPY env.yml /tmp/env.yml
+# COPY requirements.txt /tmp/requirements.txt
+WORKDIR /tmp
+RUN micromamba create -y -f /tmp/env.yml && \
+ micromamba clean -y -a
+
+#
+# Main stage
+#
+FROM ${BASE_IMAGE} as nitransforms
+
+# Configure apt
+ENV DEBIAN_FRONTEND="noninteractive" \
+ LANG="en_US.UTF-8" \
+ LC_ALL="en_US.UTF-8"
+
+# Some baseline tools; bc is needed for FreeSurfer, so don't drop it
+RUN apt-get update && \
apt-get install -y --no-install-recommends \
- nodejs && \
+ bc \
+ ca-certificates \
+ curl \
+ git \
+ gnupg \
+ lsb-release \
+ netbase \
+ xvfb && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-# Installing freesurfer
-RUN curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz | tar zxv --no-same-owner -C /opt \
- --exclude='freesurfer/diffusion' \
- --exclude='freesurfer/docs' \
- --exclude='freesurfer/fsfast' \
- --exclude='freesurfer/lib/cuda' \
- --exclude='freesurfer/lib/qt' \
- --exclude='freesurfer/matlab' \
- --exclude='freesurfer/mni/share/man' \
- --exclude='freesurfer/subjects/fsaverage_sym' \
- --exclude='freesurfer/subjects/fsaverage3' \
- --exclude='freesurfer/subjects/fsaverage4' \
- --exclude='freesurfer/subjects/cvs_avg35' \
- --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \
- --exclude='freesurfer/subjects/bert' \
- --exclude='freesurfer/subjects/lh.EC_average' \
- --exclude='freesurfer/subjects/rh.EC_average' \
- --exclude='freesurfer/subjects/sample-*.mgz' \
- --exclude='freesurfer/subjects/V1_average' \
- --exclude='freesurfer/trctrain'
-
-ENV FSL_DIR="/usr/share/fsl/5.0" \
- OS="Linux" \
+# Configure PPAs for libpng12 and libxp6
+RUN GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/linuxuprising.gpg --recv 0xEA8CACC073C3DB2A \
+ && GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/zeehio.gpg --recv 0xA1301338A3A48C4A \
+ && echo "deb [signed-by=/usr/share/keyrings/linuxuprising.gpg] https://ppa.launchpadcontent.net/linuxuprising/libpng12/ubuntu jammy main" > /etc/apt/sources.list.d/linuxuprising.list \
+ && echo "deb [signed-by=/usr/share/keyrings/zeehio.gpg] https://ppa.launchpadcontent.net/zeehio/libxp/ubuntu jammy main" > /etc/apt/sources.list.d/zeehio.list
+
+# Dependencies for AFNI; requires a discontinued multiarch-support package from bionic (18.04)
+RUN apt-get update -qq \
+ && apt-get install -y -q --no-install-recommends \
+ ed \
+ gsl-bin \
+ libglib2.0-0 \
+ libglu1-mesa-dev \
+ libglw1-mesa \
+ libgomp1 \
+ libjpeg62 \
+ libpng12-0 \
+ libxm4 \
+ libxp6 \
+ netpbm \
+ tcsh \
+ xfonts-base \
+ xvfb \
+ && curl -sSL --retry 5 -o /tmp/multiarch.deb http://archive.ubuntu.com/ubuntu/pool/main/g/glibc/multiarch-support_2.27-3ubuntu1.5_amd64.deb \
+ && dpkg -i /tmp/multiarch.deb \
+ && rm /tmp/multiarch.deb \
+ && apt-get install -f \
+ && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
+ && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \
+ && if [ -n "$gsl2_path" ]; then \
+ ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \
+ fi \
+ && ldconfig
+
+# Install files from stages
+COPY --from=freesurfer /opt/freesurfer /opt/freesurfer
+COPY --from=afni /opt/afni-latest /opt/afni-latest
+
+# Simulate SetUpFreeSurfer.sh
+ENV OS="Linux" \
FS_OVERRIDE=0 \
FIX_VERTEX_AREA="" \
FSF_OUTPUT_FORMAT="nii.gz" \
@@ -56,95 +158,59 @@ ENV SUBJECTS_DIR="$FREESURFER_HOME/subjects" \
MNI_DATAPATH="$FREESURFER_HOME/mni/data"
ENV PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \
MNI_PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \
- PATH="$FREESURFER_HOME/bin:$FSFAST_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH"
+ PATH="$FREESURFER_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH"
-# Installing Neurodebian packages (FSL, AFNI, git)
-RUN curl -sSL "http://neuro.debian.net/lists/$( lsb_release -c | cut -f2 ).us-ca.full" >> /etc/apt/sources.list.d/neurodebian.sources.list && \
- apt-key add /usr/local/etc/neurodebian.gpg && \
- (apt-key adv --refresh-keys --keyserver hkp://ha.pool.sks-keyservers.net 0xA5D32F012649A5A9 || true)
+# AFNI config
+ENV PATH="/opt/afni-latest:$PATH" \
+ AFNI_IMSAVE_WARNINGS="NO" \
+ AFNI_PLUGINPATH="/opt/afni-latest"
-RUN apt-get update && \
- apt-get install -y --no-install-recommends \
- fsl-core=5.0.9-5~nd16.04+1 \
- fsl-mni152-templates=5.0.7-2 \
- afni=16.2.07~dfsg.1-5~nd16.04+1 \
- convert3d \
- connectome-workbench=1.3.2-2~nd16.04+1 \
- git-annex-standalone && \
- apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+# Workbench config
+ENV PATH="/opt/workbench/bin_linux64:$PATH"
+
+# Create a shared $HOME directory
+RUN useradd -m -s /bin/bash -G users neuro
+WORKDIR /home/neuro
+ENV HOME="/home/neuro" \
+ LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH"
+
+COPY --from=micromamba /bin/micromamba /bin/micromamba
+COPY --from=micromamba /opt/conda/envs/nitransforms /opt/conda/envs/nitransforms
-ENV FSLDIR="/usr/share/fsl/5.0" \
+ENV MAMBA_ROOT_PREFIX="/opt/conda"
+RUN micromamba shell init -s bash && \
+ echo "micromamba activate nitransforms" >> $HOME/.bashrc
+ENV PATH="/opt/conda/envs/nitransforms/bin:$PATH" \
+ CPATH="/opt/conda/envs/nitransforms/include:$CPATH" \
+ LD_LIBRARY_PATH="/opt/conda/envs/nitransforms/lib:$LD_LIBRARY_PATH"
+
+# FSL environment
+ENV LANG="C.UTF-8" \
+ LC_ALL="C.UTF-8" \
+ PYTHONNOUSERSITE=1 \
+ FSLDIR="/opt/conda/envs/nitransforms" \
FSLOUTPUTTYPE="NIFTI_GZ" \
FSLMULTIFILEQUIT="TRUE" \
- POSSUMDIR="/usr/share/fsl/5.0" \
- LD_LIBRARY_PATH="/usr/lib/fsl/5.0:$LD_LIBRARY_PATH" \
- FSLTCLSH="/usr/bin/tclsh" \
- FSLWISH="/usr/bin/wish" \
- AFNI_MODELPATH="/usr/lib/afni/models" \
- AFNI_IMSAVE_WARNINGS="NO" \
- AFNI_TTATLAS_DATASET="/usr/share/afni/atlases" \
- AFNI_PLUGINPATH="/usr/lib/afni/plugins"
-ENV PATH="/usr/lib/fsl/5.0:/usr/lib/afni/bin:$PATH"
-
-# Installing ANTs 2.3.3 (NeuroDocker build)
-# Note: the URL says 2.3.4 but it is actually 2.3.3
-ENV ANTSPATH=/usr/lib/ants
-RUN mkdir -p $ANTSPATH && \
- curl -sSL "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \
- | tar -xzC $ANTSPATH --strip-components 1
-ENV PATH=$ANTSPATH:$PATH
-
-# Installing and setting up miniconda
-RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda3-4.5.11-Linux-x86_64.sh && \
- bash Miniconda3-4.5.11-Linux-x86_64.sh -b -p /usr/local/miniconda && \
- rm Miniconda3-4.5.11-Linux-x86_64.sh
-
-# Set CPATH for packages relying on compiled libs (e.g. indexed_gzip)
-ENV PATH="/usr/local/miniconda/bin:$PATH" \
- CPATH="/usr/local/miniconda/include/:$CPATH" \
- LANG="C.UTF-8" \
- LC_ALL="C.UTF-8" \
- PYTHONNOUSERSITE=1
-
-# Installing precomputed python packages
-RUN conda install -y -c anaconda -c conda-forge \
- python=3.7 \
- libxml2=2.9 \
- libxslt=1.1 \
- lxml \
- mkl \
- mkl-service \
- numpy=1.20 \
- pip=21 \
- scipy=1.6 \
- setuptools \
- setuptools_scm \
- toml \
- zlib; sync && \
- chmod -R a+rX /usr/local/miniconda; sync && \
- chmod +x /usr/local/miniconda/bin/*; sync && \
- conda build purge-all; sync && \
- conda clean -tipsy && sync
+ FSLLOCKDIR="" \
+ FSLMACHINELIST="" \
+ FSLREMOTECALL="" \
+ FSLGECUDAQ="cuda.q"
# Unless otherwise specified each process should only use one thread - nipype
# will handle parallelization
ENV MKL_NUM_THREADS=1 \
OMP_NUM_THREADS=1
-# Create a shared $HOME directory
-RUN useradd -m -s /bin/bash -G users neuro
-WORKDIR /home/neuro
-ENV HOME="/home/neuro"
-
# Install package
# CRITICAL: Make sure python setup.py --version has been run at least once
# outside the container, with access to the git history.
-COPY . /src/nitransforms
-RUN pip install --no-cache-dir "/src/nitransforms[all]"
+COPY --from=src /src/dist/*.whl .
+RUN python -m pip install --no-cache-dir $( ls *.whl )[all]
-RUN find $HOME -type d -exec chmod go=u {} + && \
- find $HOME -type f -exec chmod go=u {} +
+RUN find $HOME -type d -exec chmod go=u {} + && \
+ find $HOME -type f -exec chmod go=u {} + && \
+ rm -rf $HOME/.npm $HOME/.conda $HOME/.empty
RUN ldconfig
WORKDIR /tmp/
diff --git a/docker/files/freesurfer7.3.2-exclude.txt b/docker/files/freesurfer7.3.2-exclude.txt
new file mode 100644
index 00000000..3b07a64e
--- /dev/null
+++ b/docker/files/freesurfer7.3.2-exclude.txt
@@ -0,0 +1,868 @@
+freesurfer/average/711-2B_as_mni_average_305.4dfp.hdr
+freesurfer/average/711-2B_as_mni_average_305.4dfp.ifh
+freesurfer/average/711-2B_as_mni_average_305.4dfp.img
+freesurfer/average/711-2B_as_mni_average_305.4dfp.img.rec
+freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.hdr
+freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.img.rec
+freesurfer/average/711-2C_as_mni_average_305.4dfp.hdr
+freesurfer/average/711-2C_as_mni_average_305.4dfp.img.rec
+freesurfer/average/711-2C_as_mni_average_305.4dfp.mat
+freesurfer/average/aseg+spmhead+vermis+pons.ixi.gca
+freesurfer/average/BrainstemSS
+freesurfer/average/Buckner_JNeurophysiol11_MNI152
+freesurfer/average/Choi_JNeurophysiol12_MNI152
+freesurfer/average/colortable_desikan_killiany.txt
+freesurfer/average/face.gca
+freesurfer/average/HippoSF
+freesurfer/average/label_scales.dat
+freesurfer/average/lh.atlas2002_simple.gcs
+freesurfer/average/lh.atlas2005_simple.gcs
+freesurfer/average/lh.average.curvature.filled.buckner40.tif
+freesurfer/average/lh.average.CURVATURE.tif
+freesurfer/average/lh.average.tif
+freesurfer/average/lh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs
+freesurfer/average/lh.destrieux.simple.2009-07-29.gcs
+freesurfer/average/lh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs
+freesurfer/average/lh.DKTatlas100.gcs
+freesurfer/average/lh.DKTatlas40.gcs
+freesurfer/average/lh_trans_toSulc.gcs
+freesurfer/average/mideface-atlas
+freesurfer/average/mni152.mni305.cor.subfov1.dat
+freesurfer/average/mni152.mni305.cor.subfov2.dat
+freesurfer/average/mni152.register.dat
+freesurfer/average/mni305.cor.readme
+freesurfer/average/mni305.cor.subfov1.mgz
+freesurfer/average/mni305.cor.subfov1.reg
+freesurfer/average/mni305.cor.subfov2.mgz
+freesurfer/average/mni305.cor.subfov2.reg
+freesurfer/average/mni305.mask.cor.mgz
+freesurfer/average/mni_average_305.4dfp.hdr
+freesurfer/average/mni_average_305.4dfp.ifh
+freesurfer/average/mni_average_305.4dfp.img
+freesurfer/average/mni_average_305.4dfp.img.rec
+freesurfer/average/mult-comp-cor
+freesurfer/average/pons.mni152.2mm.mgz
+freesurfer/average/RB_all_2008-03-26.mni152.2mm.lta
+freesurfer/average/RB_all_2016-05-10.vc700.gca
+freesurfer/average/RB_all_2019_10_25.talxfm.mni305.gca
+freesurfer/average/RB_all_withskull_2016-05-10.vc700.gca
+freesurfer/average/RB_all_withskull_2019_10_22.talxfm.mni305.gca
+freesurfer/average/rh.atlas2002_simple.gcs
+freesurfer/average/rh.atlas2005_simple.gcs
+freesurfer/average/rh.average.curvature.filled.buckner40.tif
+freesurfer/average/rh.average.CURVATURE.tif
+freesurfer/average/rh.average.tif
+freesurfer/average/rh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs
+freesurfer/average/rh.destrieux.simple.2009-07-29.gcs
+freesurfer/average/rh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs
+freesurfer/average/rh.DKTatlas100.gcs
+freesurfer/average/rh.DKTatlas40.gcs
+freesurfer/average/rh_trans_toSulc.gcs
+freesurfer/average/RLB700_atlas_as_orig.4dfp.hdr
+freesurfer/average/RLB700_atlas_as_orig.4dfp.ifh
+freesurfer/average/RLB700_atlas_as_orig.4dfp.img
+freesurfer/average/RLB700_atlas_as_orig.4dfp.img.rec
+freesurfer/average/samseg
+freesurfer/average/surf
+freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img
+freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img
+freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img
+freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img
+freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img
+freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img
+freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img.rec
+freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.hdr
+freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.ifh
+freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img
+freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img.rec
+freesurfer/average/talairach_mixed_with_skull.gca
+freesurfer/average/ThalamicNuclei
+freesurfer/average/tissue_parms.txt
+freesurfer/average/wmsa_new_eesmith.gca
+freesurfer/average/Yeo_Brainmap_MNI152
+freesurfer/average/Yeo_JNeurophysiol11_MNI152
+freesurfer/bin/3dvolreg.afni
+freesurfer/bin/4dfptoanalyze
+freesurfer/bin/anatomiCutsUtils
+freesurfer/bin/annot2std
+freesurfer/bin/aparc2feat
+freesurfer/bin/aparcstats2table
+freesurfer/bin/aparc_stats_aseg
+freesurfer/bin/aparcstatsdiff
+freesurfer/bin/apas2aseg
+freesurfer/bin/applyMorph
+freesurfer/bin/aseg2feat
+freesurfer/bin/asegstats2table
+freesurfer/bin/asegstatsdiff
+freesurfer/bin/bblabel
+freesurfer/bin/bbmask
+freesurfer/bin/bedpostx_mgh
+freesurfer/bin/beta2sxa
+freesurfer/bin/bet.fsl
+freesurfer/bin/biasfield
+freesurfer/bin/bmedits2surf
+freesurfer/bin/brec
+freesurfer/bin/brec.awk
+freesurfer/bin/browse-minc-header.tcl
+freesurfer/bin/bugr
+freesurfer/bin/build_desikan_killiany_gcs.csh
+freesurfer/bin/cblumwmgyri
+freesurfer/bin/checkMCR.sh
+freesurfer/bin/check_recons.sh
+freesurfer/bin/check_siemens_dir
+freesurfer/bin/check_subject
+freesurfer/bin/clear_fs_env.csh
+freesurfer/bin/compute_interrater_variability.csh
+freesurfer/bin/compute_label_vals.csh
+freesurfer/bin/compute_label_volumes.csh
+freesurfer/bin/connectgraph
+freesurfer/bin/cor_to_minc
+freesurfer/bin/cp-dicom
+freesurfer/bin/createMorph
+freesurfer/bin/csvprint
+freesurfer/bin/dcmdir-info-mgh
+freesurfer/bin/dcmdjpeg.fs
+freesurfer/bin/dcmdrle.fs
+freesurfer/bin/dcmsplit
+freesurfer/bin/dcmunpack
+freesurfer/bin/deface_subject
+freesurfer/bin/defect-seg
+freesurfer/bin/dicom-rename
+freesurfer/bin/diffusionUtils
+freesurfer/bin/dmri_ac.sh
+freesurfer/bin/dmri_AnatomiCuts
+freesurfer/bin/dmri_bset
+freesurfer/bin/dmri_coloredFA
+freesurfer/bin/dmri_extractSurfaceMeasurements
+freesurfer/bin/dmri_forrest
+freesurfer/bin/dmri_group
+freesurfer/bin/dmri_groupByEndpoints
+freesurfer/bin/dmri_match
+freesurfer/bin/dmri_mergepaths
+freesurfer/bin/dmri_motion
+freesurfer/bin/dmri_neighboringRegions
+freesurfer/bin/dmri_paths
+freesurfer/bin/dmri_pathstats
+freesurfer/bin/dmri_projectEndPoints
+freesurfer/bin/dmrirc.example
+freesurfer/bin/dmrirc.long.example
+freesurfer/bin/dmrirc.long.multiscan.example
+freesurfer/bin/dmrirc.multiscan.example
+freesurfer/bin/dmri_saveHistograms
+freesurfer/bin/dmri_spline
+freesurfer/bin/dmri_stats_ac
+freesurfer/bin/dmri_tensoreig
+freesurfer/bin/dmri_train
+freesurfer/bin/dmri_trk2trk
+freesurfer/bin/dmri_violinPlots
+freesurfer/bin/dmri_vox2vox
+freesurfer/bin/dt_recon
+freesurfer/bin/epidewarp.fsl
+freesurfer/bin/exportGcam
+freesurfer/bin/feat2segstats
+freesurfer/bin/feat2surf
+freesurfer/bin/findsession
+freesurfer/bin/fix_subject
+freesurfer/bin/fix_subject_corrected
+freesurfer/bin/fix_subject_corrected-lh
+freesurfer/bin/fix_subject_corrected-rh
+freesurfer/bin/fix_subject-lh
+freesurfer/bin/fix_subject_on_seychelles
+freesurfer/bin/fix_subject-rh
+freesurfer/bin/fixup_mni_paths
+freesurfer/bin/flip_4dfp
+freesurfer/bin/flirt.fsl
+freesurfer/bin/flirt.newdefault.20080811.sch
+freesurfer/bin/fname2ext
+freesurfer/bin/fname2stem
+freesurfer/bin/freesurfer
+freesurfer/bin/freeview
+freesurfer/bin/fscalc
+freesurfer/bin/fscalc.fsl
+freesurfer/bin/fsdcmdecompress
+freesurfer/bin/fsfget
+freesurfer/bin/fsfirst.fsl
+freesurfer/bin/fs_install_mcr
+freesurfer/bin/fsl.5.0.2.xyztrans.sch
+freesurfer/bin/fs_lib_check
+freesurfer/bin/fsl_label2voxel
+freesurfer/bin/fslmaths.fsl
+freesurfer/bin/fslorient.fsl
+freesurfer/bin/fslregister
+freesurfer/bin/fsl_rigid_register
+freesurfer/bin/fsl_sub_mgh
+freesurfer/bin/fslswapdim.fsl
+freesurfer/bin/fspalm
+freesurfer/bin/fsPrintHelp
+freesurfer/bin/fsr-coreg
+freesurfer/bin/fsr-import
+freesurfer/bin/fs_run_from_mcr
+freesurfer/bin/fs_spmreg.glnxa64
+freesurfer/bin/fs_tutorial_data
+freesurfer/bin/fs_update
+freesurfer/bin/fsvglrun
+freesurfer/bin/fvcompare
+freesurfer/bin/gca-apply
+freesurfer/bin/gcainit
+freesurfer/bin/gcaprepone
+freesurfer/bin/gcatrain
+freesurfer/bin/gcatrainskull
+freesurfer/bin/gdcmconv.fs
+freesurfer/bin/gems_compute_binary_atlas_probs
+freesurfer/bin/get_label_thickness
+freesurfer/bin/groupstats
+freesurfer/bin/groupstatsdiff
+freesurfer/bin/gtmseg
+freesurfer/bin/help_xml_validate
+freesurfer/bin/hiam_make_surfaces
+freesurfer/bin/hiam_make_template
+freesurfer/bin/hiam_register
+freesurfer/bin/histo_compute_joint_density
+freesurfer/bin/histo_fix_topology
+freesurfer/bin/histo_register_block
+freesurfer/bin/histo_segment
+freesurfer/bin/histo_synthesize
+freesurfer/bin/ico_supersample
+freesurfer/bin/id.xfm
+freesurfer/bin/inflate_subject
+freesurfer/bin/inflate_subject3
+freesurfer/bin/inflate_subject-lh
+freesurfer/bin/inflate_subject_new
+freesurfer/bin/inflate_subject_new-lh
+freesurfer/bin/inflate_subject_new-rh
+freesurfer/bin/inflate_subject-rh
+freesurfer/bin/inflate_subject_sc
+freesurfer/bin/irepifitvol
+freesurfer/bin/irepifitvol.glnx64
+freesurfer/bin/isanalyze
+freesurfer/bin/isnifti
+freesurfer/bin/isolate_labels.csh
+freesurfer/bin/isolate_labels_keeporigval.csh
+freesurfer/bin/is-surface
+freesurfer/bin/jkgcatrain
+freesurfer/bin/label2flat
+freesurfer/bin/label2patch
+freesurfer/bin/label_area
+freesurfer/bin/label_border
+freesurfer/bin/label_child
+freesurfer/bin/label_elderly_subject
+freesurfer/bin/labels_disjoint
+freesurfer/bin/labels_intersect
+freesurfer/bin/label_subject
+freesurfer/bin/label_subject_flash
+freesurfer/bin/label_subject_mixed
+freesurfer/bin/labels_union
+freesurfer/bin/list_otl_labels
+freesurfer/bin/listsubj
+freesurfer/bin/long_create_base_sigma
+freesurfer/bin/long_create_orig
+freesurfer/bin/longmc
+freesurfer/bin/long_mris_slopes
+freesurfer/bin/long_qdec_table
+freesurfer/bin/long_stats_combine
+freesurfer/bin/long_stats_slopes
+freesurfer/bin/long_stats_tps
+freesurfer/bin/long_submit_jobs
+freesurfer/bin/long_submit_postproc
+freesurfer/bin/lpcregister
+freesurfer/bin/lta_diff
+freesurfer/bin/make_average_subcort
+freesurfer/bin/make_average_subject
+freesurfer/bin/make_average_surface
+freesurfer/bin/make_average_volume
+freesurfer/bin/make_cortex_label
+freesurfer/bin/make_exvivo_filled
+freesurfer/bin/make_folding_atlas
+freesurfer/bin/make_hemi_mask
+freesurfer/bin/make-segvol-table
+freesurfer/bin/make_symmetric
+freesurfer/bin/make_upright
+freesurfer/bin/makevol
+freesurfer/bin/map_all_labels
+freesurfer/bin/map_all_labels-lh
+freesurfer/bin/map_central_sulcus
+freesurfer/bin/map_to_base
+freesurfer/bin/meanval
+freesurfer/bin/mergeseg
+freesurfer/bin/merge_stats_tables
+freesurfer/bin/minc2seqinfo
+freesurfer/bin/mkheadsurf
+freesurfer/bin/mkima_index.tcl
+freesurfer/bin/mkmnc_index.tcl
+freesurfer/bin/mksubjdirs
+freesurfer/bin/mksurfatlas
+freesurfer/bin/mkxsubjreg
+freesurfer/bin/mni152reg
+freesurfer/bin/morph_only_subject
+freesurfer/bin/morph_only_subject-lh
+freesurfer/bin/morph_only_subject-rh
+freesurfer/bin/morph_rgb-lh
+freesurfer/bin/morph_rgb-rh
+freesurfer/bin/morph_subject
+freesurfer/bin/morph_subject-lh
+freesurfer/bin/morph_subject_on_seychelles
+freesurfer/bin/morph_subject-rh
+freesurfer/bin/morph_tables-lh
+freesurfer/bin/morph_tables-rh
+freesurfer/bin/mri_align_long.csh
+freesurfer/bin/mri_aparc2wmseg
+freesurfer/bin/mri_apply_autoencoder
+freesurfer/bin/mri_apply_bias
+freesurfer/bin/mri_apply_inu_correction
+freesurfer/bin/mri_aseg_edit_reclassify
+freesurfer/bin/mri_aseg_edit_train
+freesurfer/bin/mri_auto_fill
+freesurfer/bin/mri_average
+freesurfer/bin/mri_bc_sc_bias_correct
+freesurfer/bin/mri_brain_volume
+freesurfer/bin/mri_build_priors
+freesurfer/bin/mri_cal_renormalize_gca
+freesurfer/bin/mri_ca_tissue_parms
+freesurfer/bin/mri_ca_train
+freesurfer/bin/mri_cht2p
+freesurfer/bin/mri_classify
+freesurfer/bin/mri_cnr
+freesurfer/bin/mri_compute_bias
+freesurfer/bin/mri_compute_change_map
+freesurfer/bin/mri_compute_distances
+freesurfer/bin/mri_compute_layer_fractions
+freesurfer/bin/mri_compute_structure_transforms
+freesurfer/bin/mri_compute_volume_fractions
+freesurfer/bin/mri_compute_volume_intensities
+freesurfer/bin/mri_concatenate_gcam
+freesurfer/bin/mri_convert_mdh
+freesurfer/bin/mri_copy_params
+freesurfer/bin/mri_copy_values
+freesurfer/bin/mri_cor2label
+freesurfer/bin/mri_correct_segmentations
+freesurfer/bin/mri_create_t2combined
+freesurfer/bin/mri_create_tests
+freesurfer/bin/mri_cvs_check
+freesurfer/bin/mri_cvs_data_copy
+freesurfer/bin/mri_cvs_register
+freesurfer/bin/mri_cvs_requiredfiles.txt
+freesurfer/bin/mri_dct_align
+freesurfer/bin/mri_dct_align_binary
+freesurfer/bin/mri_distance_transform
+freesurfer/bin/mri_dist_surf_label
+freesurfer/bin/mri_divide_segmentation
+freesurfer/bin/mri_edit_segmentation
+freesurfer/bin/mri_edit_segmentation_with_surfaces
+freesurfer/bin/mri_elastic_energy
+freesurfer/bin/mri_estimate_tissue_parms
+freesurfer/bin/mri_evaluate_morph
+freesurfer/bin/mri_extract
+freesurfer/bin/mri_extract_conditions
+freesurfer/bin/mri_extract_fcd_features
+freesurfer/bin/mri_extract_label
+freesurfer/bin/mri_extract_largest_CC
+freesurfer/bin/mri_fcili
+freesurfer/bin/mri_fdr
+freesurfer/bin/mri_fieldsign
+freesurfer/bin/mri_fit_bias
+freesurfer/bin/mri_fslmat_to_lta
+freesurfer/bin/mri-func2sph
+freesurfer/bin/mri-funcvits
+freesurfer/bin/mri_fuse_intensity_images
+freesurfer/bin/mri_gca_ambiguous
+freesurfer/bin/mri_gcab_train
+freesurfer/bin/mri_gdfglm
+freesurfer/bin/mri_glmfit
+freesurfer/bin/mri_glmfit-sim
+freesurfer/bin/mri_gradient_info
+freesurfer/bin/mri_gradunwarp
+freesurfer/bin/mri_gtmpvc
+freesurfer/bin/mri_gtmseg
+freesurfer/bin/mri_hausdorff_dist
+freesurfer/bin/mri_head
+freesurfer/bin/mri_hires_register
+freesurfer/bin/mri_histo_eq
+freesurfer/bin/mri_histo_normalize
+freesurfer/bin/mri_ibmc
+freesurfer/bin/mri_interpolate
+freesurfer/bin/mri_jacobian
+freesurfer/bin/mri_joint_density
+freesurfer/bin/mri_label_accuracy
+freesurfer/bin/mri_label_histo
+freesurfer/bin/mri_label_vals
+freesurfer/bin/mri_label_volume
+freesurfer/bin/mri_linear_align
+freesurfer/bin/mri_linear_align_binary
+freesurfer/bin/mri_linear_register
+freesurfer/bin/mri_long_normalize
+freesurfer/bin/mri_make_bem_surfaces
+freesurfer/bin/mri_make_density_map
+freesurfer/bin/mri_make_labels
+freesurfer/bin/mri_make_register
+freesurfer/bin/mri_make_template
+freesurfer/bin/mri_map_cpdat
+freesurfer/bin/mri_maps2csd
+freesurfer/bin/mri_mark_temporal_lobe
+freesurfer/bin/mri_mc
+freesurfer/bin/mri_mcsim
+freesurfer/bin/mri_mergelabels
+freesurfer/bin/mri_mi
+freesurfer/bin/mri_modify
+freesurfer/bin/mri_morphology
+freesurfer/bin/mri_mosaic
+freesurfer/bin/mri_motion_correct
+freesurfer/bin/mri_motion_correct2
+freesurfer/bin/mri_ms_EM
+freesurfer/bin/mri_ms_EM_with_atlas
+freesurfer/bin/mri_ms_fitparms
+freesurfer/bin/mri_ms_LDA
+freesurfer/bin/mri_multiscale_segment
+freesurfer/bin/mri_multispectral_segment
+freesurfer/bin/mri_nl_align
+freesurfer/bin/mri_nl_align_binary
+freesurfer/bin/mri_nlfilter
+freesurfer/bin/mri_paint
+freesurfer/bin/mri_parselabel
+freesurfer/bin/mri_parse_sdcmdir
+freesurfer/bin/mri_partial_ribbon
+freesurfer/bin/mri_path2label
+freesurfer/bin/mri_polv
+freesurfer/bin/mri_probedicom
+freesurfer/bin/mri_probe_ima
+freesurfer/bin/mri_reduce
+freesurfer/bin/mri_refine_seg
+freesurfer/bin/mri_register
+freesurfer/bin/mri_reorient_LR.csh
+freesurfer/bin/mri_rf_label
+freesurfer/bin/mri_rf_long_label
+freesurfer/bin/mri_rf_long_train
+freesurfer/bin/mri_rf_train
+freesurfer/bin/mri_ribbon
+freesurfer/bin/mri_rigid_register
+freesurfer/bin/mris2rgb
+freesurfer/bin/mris_AA_shrinkwrap
+freesurfer/bin/mris_add_template
+freesurfer/bin/mris_annot_diff
+freesurfer/bin/mris_annot_to_segmentation
+freesurfer/bin/mris_aseg_distance
+freesurfer/bin/mris_average_curvature
+freesurfer/bin/mris_average_parcellation
+freesurfer/bin/mris_BA_segment
+freesurfer/bin/mri_sbbr
+freesurfer/bin/mris_ca_deform
+freesurfer/bin/mris_ca_train
+freesurfer/bin/mris_classify_thickness
+freesurfer/bin/mris_compute_acorr
+freesurfer/bin/mris_compute_layer_intensities
+freesurfer/bin/mris_compute_lgi
+freesurfer/bin/mris_compute_optimal_kernel
+freesurfer/bin/mris_compute_overlap
+freesurfer/bin/mris_compute_parc_overlap
+freesurfer/bin/mris_compute_volume_fractions
+freesurfer/bin/mris_congeal
+freesurfer/bin/mris_copy_header
+freesurfer/bin/mris_curvature2image
+freesurfer/bin/mris_deform
+freesurfer/bin/mris_density
+freesurfer/bin/mris_distance_map
+freesurfer/bin/mris_distance_to_label
+freesurfer/bin/mris_distance_transform
+freesurfer/bin/mri_segcentroids
+freesurfer/bin/mri_seghead
+freesurfer/bin/mri_segment_hypothalamic_subunits
+freesurfer/bin/mri_segment_tumor
+freesurfer/bin/mri_segment_wm_damage
+freesurfer/bin/mri_seg_overlap
+freesurfer/bin/mris_entropy
+freesurfer/bin/mris_errors
+freesurfer/bin/mris_extract_patches
+freesurfer/bin/mris_extract_values
+freesurfer/bin/mris_exvivo_surfaces
+freesurfer/bin/mris_fbirn_annot
+freesurfer/bin/mris_fill
+freesurfer/bin/mris_find_flat_regions
+freesurfer/bin/mris_flatten
+freesurfer/bin/mris_fwhm
+freesurfer/bin/mris_gradient
+freesurfer/bin/mris_hausdorff_dist
+freesurfer/bin/mris_image2vtk
+freesurfer/bin/mri_simulate_atrophy
+freesurfer/bin/mris_info
+freesurfer/bin/mris_init_global_tractography
+freesurfer/bin/mris_intensity_profile
+freesurfer/bin/mris_interpolate_warp
+freesurfer/bin/mris_label_area
+freesurfer/bin/mris_label_calc
+freesurfer/bin/mris_label_mode
+freesurfer/bin/mris_longitudinal_surfaces
+freesurfer/bin/mris_make_average_surface
+freesurfer/bin/mris_make_face_parcellation
+freesurfer/bin/mris_make_map_surfaces
+freesurfer/bin/mris_make_surfaces
+freesurfer/bin/mris_make_template
+freesurfer/bin/mris_map_cuts
+freesurfer/bin/mris_mef_surfaces
+freesurfer/bin/mris_merge_parcellations
+freesurfer/bin/mris_mesh_subdivide
+freesurfer/bin/mris_morph_stats
+freesurfer/bin/mris_ms_refine
+freesurfer/bin/mris_ms_surface_CNR
+freesurfer/bin/mris_multimodal
+freesurfer/bin/mris_multimodal_surface_placement
+freesurfer/bin/mris_multiscale_stats
+freesurfer/bin/mris_niters2fwhm
+freesurfer/bin/mris_nudge
+freesurfer/bin/mris_parcellate_connectivity
+freesurfer/bin/mri-sph2surf
+freesurfer/bin/mris_pmake
+freesurfer/bin/mris_preproc
+freesurfer/bin/mris_profileClustering
+freesurfer/bin/mrisp_write
+freesurfer/bin/mris_refine_surfaces
+freesurfer/bin/mris_register_label_map
+freesurfer/bin/mris_register_to_label
+freesurfer/bin/mris_register_to_volume
+freesurfer/bin/mris_remove_negative_vertices
+freesurfer/bin/mris_remove_variance
+freesurfer/bin/mris_resample
+freesurfer/bin/mris_rescale
+freesurfer/bin/mris_reverse
+freesurfer/bin/mris_rf_label
+freesurfer/bin/mris_rf_train
+freesurfer/bin/mris_rotate
+freesurfer/bin/mris_sample_label
+freesurfer/bin/mris_sample_parc
+freesurfer/bin/mris_seg2annot
+freesurfer/bin/mris_segment
+freesurfer/bin/mris_segmentation_stats
+freesurfer/bin/mris_segment_vals
+freesurfer/bin/mris_shrinkwrap
+freesurfer/bin/mris_simulate_atrophy
+freesurfer/bin/mris_smooth_intracortical
+freesurfer/bin/mris_surf2vtk
+freesurfer/bin/mris_surface_change
+freesurfer/bin/mris_surface_to_vol_distances
+freesurfer/bin/mris_svm_classify
+freesurfer/bin/mris_svm_train
+freesurfer/bin/mris_talairach
+freesurfer/bin/mris_thickness_comparison
+freesurfer/bin/mris_transform
+freesurfer/bin/mris_translate_annotation
+freesurfer/bin/mris_transmantle_dysplasia_paths
+freesurfer/bin/mri_strip_nonwhite
+freesurfer/bin/mri_strip_subject_info
+freesurfer/bin/mris_twoclass
+freesurfer/bin/mri_surfacemask
+freesurfer/bin/mris_volmask_novtk
+freesurfer/bin/mris_volmask_vtk
+freesurfer/bin/mris_volsmooth
+freesurfer/bin/mris_volume
+freesurfer/bin/mris_warp
+freesurfer/bin/mris_wm_volume
+freesurfer/bin/mris_w_to_curv
+freesurfer/bin/mri_synthesize
+freesurfer/bin/mri_synthstrip
+freesurfer/bin/mri_threshold
+freesurfer/bin/mri_topologycorrection
+freesurfer/bin/mri_train
+freesurfer/bin/mri_train_autoencoder
+freesurfer/bin/mri_transform
+freesurfer/bin/mri_transform_to_COR
+freesurfer/bin/mri_twoclass
+freesurfer/bin/mri_update_gca
+freesurfer/bin/mri_validate_skull_stripped
+freesurfer/bin/mri_vessel_segment
+freesurfer/bin/mri_vol2label
+freesurfer/bin/mri_vol2roi
+freesurfer/bin/mri_volcluster
+freesurfer/bin/mri_volsynth
+freesurfer/bin/mri_warp_convert
+freesurfer/bin/mri_wbc
+freesurfer/bin/mri_wmfilter
+freesurfer/bin/mri_xcorr
+freesurfer/bin/mri_xvolavg
+freesurfer/bin/mri_z2p
+freesurfer/bin/ms_refine_subject
+freesurfer/bin/nmovie_qt
+freesurfer/bin/oct_register_mosaic
+freesurfer/bin/oct_rf_train
+freesurfer/bin/oct_train
+freesurfer/bin/optseq2
+freesurfer/bin/orientLAS
+freesurfer/bin/parc_atlas_jackknife_test
+freesurfer/bin/plot_structure_stats.tcl
+freesurfer/bin/polyorder
+freesurfer/bin/predict_v1.sh
+freesurfer/bin/print_unique_labels.csh
+freesurfer/bin/progressbar.tcl
+freesurfer/bin/qatools.py
+freesurfer/bin/qdec
+freesurfer/bin/qdec_glmfit
+freesurfer/bin/qt.conf
+freesurfer/bin/quantifyBrainstemStructures.sh
+freesurfer/bin/quantifyHAsubregions.sh
+freesurfer/bin/quantifyThalamicNuclei.sh
+freesurfer/bin/rbbr
+freesurfer/bin/rbftest
+freesurfer/bin/rcbf-prep
+freesurfer/bin/rebuild_gca_atlas.csh
+freesurfer/bin/recon-all-exvivo
+freesurfer/bin/recon-all.makefile
+freesurfer/bin/regdat2xfm
+freesurfer/bin/reg-feat2anat
+freesurfer/bin/register_child
+freesurfer/bin/register.csh
+freesurfer/bin/register_elderly_subject
+freesurfer/bin/register_subject
+freesurfer/bin/register_subject_flash
+freesurfer/bin/register_subject_mixed
+freesurfer/bin/reg-mni305.2mm
+freesurfer/bin/reinflate_subject
+freesurfer/bin/reinflate_subject-lh
+freesurfer/bin/reinflate_subject-rh
+freesurfer/bin/remove_talairach
+freesurfer/bin/renormalize_subject
+freesurfer/bin/renormalize_subject_keep_editting
+freesurfer/bin/renormalize_T1_subject
+freesurfer/bin/repair_siemens_file
+freesurfer/bin/reregister_subject_mixed
+freesurfer/bin/rtview
+freesurfer/bin/run_mris_preproc
+freesurfer/bin/run-qdec-glm
+freesurfer/bin/run_samseg_long
+freesurfer/bin/run_SegmentSubfieldsT1Longitudinal.sh
+freesurfer/bin/run_SegmentSubject.sh
+freesurfer/bin/run_segmentSubjectT1_autoEstimateAlveusML.sh
+freesurfer/bin/run_segmentSubjectT1T2_autoEstimateAlveusML.sh
+freesurfer/bin/run_segmentSubjectT2_autoEstimateAlveusML.sh
+freesurfer/bin/run_SegmentThalamicNuclei.sh
+freesurfer/bin/samseg
+freesurfer/bin/samseg2recon
+freesurfer/bin/samseg-long
+freesurfer/bin/sbtiv
+freesurfer/bin/seg2filled
+freesurfer/bin/segmentBS.sh
+freesurfer/bin/segmentHA_T1_long.sh
+freesurfer/bin/segmentHA_T1.sh
+freesurfer/bin/segmentHA_T2.sh
+freesurfer/bin/segment_monkey
+freesurfer/bin/SegmentSubfieldsT1Longitudinal
+freesurfer/bin/segment_subject
+freesurfer/bin/segmentSubject
+freesurfer/bin/segment_subject_notal
+freesurfer/bin/segment_subject_notal2
+freesurfer/bin/segment_subject_old_skull_strip
+freesurfer/bin/segment_subject_sc
+freesurfer/bin/segmentSubjectT1_autoEstimateAlveusML
+freesurfer/bin/segmentSubjectT1T2_autoEstimateAlveusML
+freesurfer/bin/segmentSubjectT2_autoEstimateAlveusML
+freesurfer/bin/segment_subject_talmgh
+freesurfer/bin/SegmentThalamicNuclei
+freesurfer/bin/segmentThalamicNuclei.sh
+freesurfer/bin/segpons
+freesurfer/bin/setlabelstat
+freesurfer/bin/sfa2fieldsign
+freesurfer/bin/show_tal
+freesurfer/bin/skip_long_make_checks
+freesurfer/bin/slicedelay
+freesurfer/bin/slicetimer.fsl
+freesurfer/bin/sphere_subject
+freesurfer/bin/sphere_subject-lh
+freesurfer/bin/sphere_subject-rh
+freesurfer/bin/spherical_st
+freesurfer/bin/Spline3_test
+freesurfer/bin/spmmat2register
+freesurfer/bin/spmregister
+freesurfer/bin/spm_t_to_b
+freesurfer/bin/sratio
+freesurfer/bin/stat_normalize
+freesurfer/bin/stattablediff
+freesurfer/bin/stem2fname
+freesurfer/bin/stim_polar
+freesurfer/bin/streamlineFilter
+freesurfer/bin/surf2vol
+freesurfer/bin/surfreg
+freesurfer/bin/swi_preprocess
+freesurfer/bin/swi_process
+freesurfer/bin/t4img_4dfp
+freesurfer/bin/t4imgs_4dfp
+freesurfer/bin/talairach2
+freesurfer/bin/talairach_mgh
+freesurfer/bin/tal_compare
+freesurfer/bin/tal_QC_AZS
+freesurfer/bin/talsegprob
+freesurfer/bin/template
+freesurfer/bin/testOrientationPlanesFromParcellation
+freesurfer/bin/test_recon-all.csh
+freesurfer/bin/test_tutorials.sh
+freesurfer/bin/thickdiffmap
+freesurfer/bin/tkmedit
+freesurfer/bin/tkmeditfv
+freesurfer/bin/tkregister2
+freesurfer/bin/tkregisterfv
+freesurfer/bin/tksurfer
+freesurfer/bin/tksurferfv
+freesurfer/bin/trac-all
+freesurfer/bin/trac-paths
+freesurfer/bin/trac-preproc
+freesurfer/bin/tractstats2table
+freesurfer/bin/train-gcs-atlas
+freesurfer/bin/tridec
+freesurfer/bin/trk_tools
+freesurfer/bin/unpack_ima1.tcl
+freesurfer/bin/unpackimadir
+freesurfer/bin/unpackimadir2
+freesurfer/bin/unpack_ima.tcl
+freesurfer/bin/unpackmincdir
+freesurfer/bin/unpack_mnc.tcl
+freesurfer/bin/unpacksdcmdir
+freesurfer/bin/usbtree
+freesurfer/bin/vol2segavg
+freesurfer/bin/vol2subfield
+freesurfer/bin/vol2symsurf
+freesurfer/bin/vsm-smooth
+freesurfer/bin/wfilemask
+freesurfer/bin/wm-anat-snr
+freesurfer/bin/wmedits2surf
+freesurfer/bin/wmsaseg
+freesurfer/bin/xcerebralseg
+freesurfer/bin/xcorr
+freesurfer/bin/xfmrot
+freesurfer/bin/xhemireg
+freesurfer/bin/xhemi-tal
+freesurfer/bin/xsanatreg
+freesurfer/bin/zero_lt_4dfp
+freesurfer/DefectLUT.txt
+freesurfer/diffusion
+freesurfer/docs/xml
+freesurfer/FreeSurferEnv.csh
+freesurfer/FreeSurferEnv.sh
+freesurfer/fsfast
+freesurfer/lib/bem/ic0.tri
+freesurfer/lib/bem/ic1.tri
+freesurfer/lib/bem/ic2.tri
+freesurfer/lib/bem/ic3.tri
+freesurfer/lib/bem/ic6.tri
+freesurfer/lib/bem/inner_skull.dat
+freesurfer/lib/bem/outer_skin.dat
+freesurfer/lib/bem/outer_skull.dat
+freesurfer/lib/images
+freesurfer/lib/qt
+freesurfer/lib/resource
+freesurfer/lib/tcl
+freesurfer/lib/tktools
+freesurfer/lib/vtk
+freesurfer/matlab
+freesurfer/mni-1.4
+freesurfer/mni/bin/correct_field
+freesurfer/mni/bin/crispify
+freesurfer/mni/bin/dcm2mnc
+freesurfer/mni/bin/Display
+freesurfer/mni/bin/ecattominc
+freesurfer/mni/bin/evaluate_field
+freesurfer/mni/bin/extracttag
+freesurfer/mni/bin/field2imp
+freesurfer/mni/bin/imp2field
+freesurfer/mni/bin/invert_raw_image
+freesurfer/mni/bin/make_model
+freesurfer/mni/bin/make_phantom
+freesurfer/mni/bin/make_template
+freesurfer/mni/bin/mincaverage
+freesurfer/mni/bin/mincbbox
+freesurfer/mni/bin/minccalc
+freesurfer/mni/bin/mincchamfer
+freesurfer/mni/bin/mincconcat
+freesurfer/mni/bin/minccopy
+freesurfer/mni/bin/mincdiff
+freesurfer/mni/bin/mincedit
+freesurfer/mni/bin/mincexpand
+freesurfer/mni/bin/mincextract
+freesurfer/mni/bin/mincheader
+freesurfer/mni/bin/minchistory
+freesurfer/mni/bin/minclookup
+freesurfer/mni/bin/mincmakescalar
+freesurfer/mni/bin/mincmakevector
+freesurfer/mni/bin/mincmath
+freesurfer/mni/bin/minc_modify_header
+freesurfer/mni/bin/mincpik
+freesurfer/mni/bin/mincreshape
+freesurfer/mni/bin/mincstats
+freesurfer/mni/bin/minctoecat
+freesurfer/mni/bin/minctoraw
+freesurfer/mni/bin/mincview
+freesurfer/mni/bin/mincwindow
+freesurfer/mni/bin/mnc2nii
+freesurfer/mni/bin/mritoself
+freesurfer/mni/bin/ncdump
+freesurfer/mni/bin/ncgen
+freesurfer/mni/bin/nii2mnc
+freesurfer/mni/bin/nu_estimate
+freesurfer/mni/bin/nu_estimate_np_and_em~
+freesurfer/mni/bin/nu_evaluate
+freesurfer/mni/bin/param2xfm
+freesurfer/mni/bin/rand_param
+freesurfer/mni/bin/rawtominc
+freesurfer/mni/bin/register
+freesurfer/mni/bin/resample_labels
+freesurfer/mni/bin/sharpen_hist
+freesurfer/mni/bin/sharpen_volume
+freesurfer/mni/bin/spline_smooth
+freesurfer/mni/bin/transformtags
+freesurfer/mni/bin/upet2mnc
+freesurfer/mni/bin/volume_hist
+freesurfer/mni/bin/volume_stats
+freesurfer/mni/bin/voxeltoworld
+freesurfer/mni/bin/worldtovoxel
+freesurfer/mni/bin/xcorr_vol
+freesurfer/mni/bin/xfm2param
+freesurfer/mni/bin/xfmconcat
+freesurfer/mni/bin/xfminvert
+freesurfer/mni/bin/xfmtool
+freesurfer/mni/bin/zscore_vol
+freesurfer/mni/data
+freesurfer/mni/include
+freesurfer/mni/mni.srcbuild.June2015.tgz
+freesurfer/mni/share/man
+freesurfer/mni/share/N3
+freesurfer/models
+freesurfer/python/lib/python3.8/test
+freesurfer/python/lib/python3.8/site-packages/caffe2
+freesurfer/python/lib/python3.8/site-packages/sklearn
+freesurfer/python/lib/python3.8/site-packages/tensorflow
+freesurfer/python/lib/python3.8/site-packages/torch
+freesurfer/python/lib/python3.8/site-packages/**/tests
+freesurfer/python/**/__pycache__
+freesurfer/python/share
+freesurfer/SegmentNoLUT.txt
+freesurfer/sessions
+freesurfer/SetUpFreeSurfer.csh
+freesurfer/SetUpFreeSurfer.sh
+freesurfer/Simple_surface_labels2009.txt
+freesurfer/sources.sh
+freesurfer/subjects/bert
+freesurfer/subjects/cvs_avg35
+freesurfer/subjects/cvs_avg35_inMNI152
+freesurfer/subjects/fsaverage3
+freesurfer/subjects/fsaverage4
+freesurfer/subjects/fsaverage_sym
+freesurfer/subjects/lh.EC_average
+freesurfer/subjects/README
+freesurfer/subjects/rh.EC_average
+freesurfer/subjects/sample-001.mgz
+freesurfer/subjects/sample-002.mgz
+freesurfer/subjects/V1_average
+freesurfer/tkmeditParcColorsCMA
+freesurfer/tktools
+freesurfer/trctrain
diff --git a/docker/files/fs-cert.pem b/docker/files/fs-cert.pem
new file mode 100644
index 00000000..34d53c7b
--- /dev/null
+++ b/docker/files/fs-cert.pem
@@ -0,0 +1,44 @@
+-----BEGIN CERTIFICATE-----
+MIIHuDCCBiCgAwIBAgIRAMa1FS9MSn5TXKMgD8OXtoswDQYJKoZIhvcNAQEMBQAw
+RDELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUludGVybmV0MjEhMB8GA1UEAxMYSW5D
+b21tb24gUlNBIFNlcnZlciBDQSAyMB4XDTI0MDUwOTAwMDAwMFoXDTI1MDUwOTIz
+NTk1OVowfTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDU1hc3NhY2h1c2V0dHMxMTAv
+BgNVBAoTKFByZXNpZGVudCBhbmQgRmVsbG93cyBvZiBIYXJ2YXJkIENvbGxlZ2Ux
+IzAhBgNVBAMTGnN1cmZlci5ubXIubWdoLmhhcnZhcmQuZWR1MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxJfeMKn0NjFkmduegvgRICrm+hn4TaZITeVl
+uM/af+g05RUtKRKNIR0CC9mzPDYiW10VNj7TuDyS1DNoe/Jr1Or8lrMCm81NHjnY
+aKvtC61O9GWvvDfWeb35vkHfkbd60AgBaLGZIEglENl122bBqpSdO8JglVTDgvFd
+mWkuBnQzE/qKt7j88Xjafjhzbnv9Uf1hh8NtbiOaAf53/b5FZuUary64k5twPwpm
+Y/pWw3CQhIWUhvRMwcQNvG24lDOssOXSEgb9Gd96ikU/yE4MDnuDBb6tf+2crVQ5
+PF4V2YvbQZ2x8Kf8hygWk3C555ZSCR3LgRl/Paxp9DZUrxfjtwIDAQABo4ID6jCC
+A+YwHwYDVR0jBBgwFoAU70wAkqb7di5eleLJX4cbGdVN4tkwHQYDVR0OBBYEFG0I
+bSHaYbRPsftHU7uJ5A7Z9UBfMA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8EAjAA
+MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBJBgNVHSAEQjBAMDQGCysG
+AQQBsjEBAgJnMCUwIwYIKwYBBQUHAgEWF2h0dHBzOi8vc2VjdGlnby5jb20vQ1BT
+MAgGBmeBDAECAjBABgNVHR8EOTA3MDWgM6Axhi9odHRwOi8vY3JsLnNlY3RpZ28u
+Y29tL0luQ29tbW9uUlNBU2VydmVyQ0EyLmNybDBwBggrBgEFBQcBAQRkMGIwOwYI
+KwYBBQUHMAKGL2h0dHA6Ly9jcnQuc2VjdGlnby5jb20vSW5Db21tb25SU0FTZXJ2
+ZXJDQTIuY3J0MCMGCCsGAQUFBzABhhdodHRwOi8vb2NzcC5zZWN0aWdvLmNvbTCC
+AX8GCisGAQQB1nkCBAIEggFvBIIBawFpAHUAzxFW7tUufK/zh1vZaS6b6RpxZ0qw
+F+ysAdJbd87MOwgAAAGPXXtaqAAABAMARjBEAiARbv8hz3utGkTar2Y3jNnWOGKG
+aajAYuB3f30g5Bnb+AIgAmwaltfGcp2uNYmTMU2eSC5AVhpnbisDS2KcFyC7ok4A
+dwCi4wrkRe+9rZt+OO1HZ3dT14JbhJTXK14bLMS5UKRH5wAAAY9de1pQAAAEAwBI
+MEYCIQDrM8C7Y6GdEKRmGQ1AUmbUArbpImpEXutI8E+KVOUsogIhAKbl+QYqJIUB
+rRHpRkKZlefPyZQRo6JnRNz/J1KEuqsTAHcATnWjJ1yaEMM4W2zU3z9S6x3w4I4b
+jWnAsfpksWKaOd8AAAGPXXtaRgAABAMASDBGAiEApB4qFWHZLGtPNkUK+6jFqsEk
+vmy3bv3cuODXSG3CvfMCIQCCQPR/3HcrSGfmeJsFjWvwLbJFqe6GbRWCvjaUaldI
+WDCB5AYDVR0RBIHcMIHZghpzdXJmZXIubm1yLm1naC5oYXJ2YXJkLmVkdYIUZm9y
+dW0uZnJlZXN1cmZlci5uZXSCGWZvcnVtLm5tci5tZ2guaGFydmFyZC5lZHWCDmZy
+ZWVzdXJmZXIubmV0ghdmdHAubm1yLm1naC5oYXJ2YXJkLmVkdYIZcnN5bmMubm1y
+Lm1naC5oYXJ2YXJkLmVkdYIWc3VwcG9ydC5mcmVlc3VyZmVyLm5ldIIad2ViZGV2
+Lm5tci5tZ2guaGFydmFyZC5lZHWCEnd3dy5mcmVlc3VyZmVyLm5ldDANBgkqhkiG
+9w0BAQwFAAOCAYEAB5nGih504XqNbZJjz1mK43IAiFs0YjocdDWOqqbXMirpbpNd
+cPleB0iJkXJnzfzkZFyLdvFGos9FuPRDGmXSh0sdWpm2uQdkXlOp+/e4vMDg8Nrl
+YkjshuU4fmUswnzsQ1aj/ome1DG3rmp3umjKpV6ewnVLhgjQ5zwCHfLLsNyzowqn
+I6qAa2uzk7bS3XuYu4bLhVfD3X0Ybe4V3FKODBZRAIIU8hgtCz6zw5WtxzdEm5Qp
+FHdN8OKazXvrJbzHB1WVk7buIn+8n2HoO202wGaFyyQFVqM2ug0FgCW8AaB+XRCq
+BV+nZND2AIALG1HcIIL+pZwxS1K/jBkjUJRb3GDVWw7yzxuvlmawLhk8xzrgTsvp
+QXaR+CbnTBx1PeB4nf+yHg2VBvKk6m7E9lnyymENmbeaLC67CJQgr+ne1rpOuEIs
+fVbKUP437fyEvPuZCZ+3gxFsKgOerk2J95+AdLKB01pQFh/ITS/2zHHeMeD118sR
+mUO+RXiPX5ZUqu/M
+-----END CERTIFICATE-----
diff --git a/docker/files/neurodebian.gpg b/docker/files/neurodebian.gpg
deleted file mode 100644
index c546d45d..00000000
--- a/docker/files/neurodebian.gpg
+++ /dev/null
@@ -1,71 +0,0 @@
------BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1
-
-mQGiBEQ7TOgRBADvaRsIZ3VZ6Qy7PlDpdMm97m0OfvouOj/HhjOM4M3ECbGn4cYh
-vN1gK586s3sUsUcNQ8LuWvNsYhxYsVTZymCReJMEDxod0U6/z/oIbpWv5svF3kpl
-ogA66Ju/6cZx62RiCSOkskI6A3Waj6xHyEo8AGOPfzbMoOOQ1TS1u9s2FwCgxziL
-wADvKYlDZnWM03QtqIJVD8UEAOks9Q2OqFoqKarj6xTRdOYIBVEp2jhozZUZmLmz
-pKL9E4NKGfixqxdVimFcRUGM5h7R2w7ORqXjCzpiPmgdv3jJLWDnmHLmMYRYQc8p
-5nqo8mxuO3zJugxBemWoacBDd1MJaH7nK20Hsk9L/jvU/qLxPJotMStTnwO+EpsK
-HlihA/9ZpvzR1QWNUd9nSuNR3byJhaXvxqQltsM7tLqAT4qAOJIcMjxr+qESdEbx
-NHM5M1Y21ZynrsQw+Fb1WHXNbP79vzOxHoZR0+OXe8uUpkri2d9iOocre3NUdpOO
-JHtl6cGGTFILt8tSuOVxMT/+nlo038JQB2jARe4B85O0tkPIPbQybmV1cm8uZGVi
-aWFuLm5ldCBhcmNoaXZlIDxtaWNoYWVsLmhhbmtlQGdtYWlsLmNvbT6IRgQQEQgA
-BgUCTVHJKwAKCRCNEUVjdcAkyOvzAJ0abJz+f2a6VZG1c9T8NHMTYh1atwCgt0EE
-3ZZd/2in64jSzu0miqhXbOKISgQQEQIACgUCSotRlwMFAXgACgkQ93+NsjFEvg8n
-JgCfWcdJbILBtpLZCocvOzlLPqJ0Fn0AoI4EpJRxoUnrtzBGUC1MqecU7WsDiGAE
-ExECACAFAkqLUWcCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRCl0y8BJkml
-qVklAJ4h2V6MdQkSAThF5c2Gkq6eSoIQYQCeM0DWyB9Bl+tTPSTYXwwZi2uoif20
-QmFwc3kuZ3NlLnVuaS1tYWdkZWJ1cmcuZGUgRGViaWFuIEFyY2hpdmUgPG1pY2hh
-ZWwuaGFua2VAZ21haWwuY29tPohGBBARAgAGBQJEO03FAAoJEPd/jbIxRL4PU18A
-n3tn7i4qdlMi8kHbYWFoabsKc9beAJ9sl/leZNCYNMGhz+u6BQgyeLKw94heBBMR
-AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA
-n27DvtZizNEbhz3wRUPQMiQjtqdvAJ9rS9YdPe5h5o5gHx3mw3BSkOttdYheBBMR
-AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA
-oLhwWL+E+2I9lrUf4Lf26quOK9vLAKC9ZpIF2tUirFFkBWnQvu13/TA0SokCHAQQ
-AQIABgUCTSNBgQAKCRDAc9Iof/uem4NpEACQ8jxmaCaS/qk/Y4GiwLA5bvKosG3B
-iARZ2v5UWqCZQ1tS56yKse/lCIzXQqU9BnYW6wOI2rvFf9meLfd8h96peG6oKscs
-fbclLDIf68bBvGBQaD0VYFi/Fk/rxmTQBOCQ3AJZs8O5rIM4gPGE0QGvSZ1h7VRw
-3Uyeg4jKXLIeJn2xEmOJgt3auAR2FyKbzHaX9JCoByJZ/eU23akNl9hgt7ePlpXo
-74KNYC58auuMUhCq3BQDB+II4ERYMcmFp1N5ZG05Cl6jcaRRHDXz+Ax6DWprRI1+
-RH/Yyae6LmKpeJNwd+vM14aawnNO9h8IAQ+aJ3oYZdRhGyybbin3giJ10hmWveg/
-Pey91Nh9vBCHdDkdPU0s9zE7z/PHT0c5ccZRukxfZfkrlWQ5iqu3V064ku5f4PBy
-8UPSkETcjYgDnrdnwqIAO+oVg/SFlfsOzftnwUrvwIcZlXAgtP6MEEAs/38e/JIN
-g4VrpdAy7HMGEUsh6Ah6lvGQr+zBnG44XwKfl7e0uCYkrAzUJRGM5vx9iXvFMcMu
-jv9EBNNBOU8/Y6MBDzGZhgaoeI27nrUvaveJXjAiDKAQWBLjtQjINZ8I9uaSGOul
-8kpbFavE4eS3+KhISrSHe4DuAa3dk9zI+FiPvXY1ZyfQBtNpR+gYFY6VxMbHhY1U
-lSLHO2eUIQLdYbRITmV1cm9EZWJpYW4gQXJjaGl2ZSBLZXkgPHBrZy1leHBwc3kt
-bWFpbnRhaW5lcnNAbGlzdHMuYWxpb3RoLmRlYmlhbi5vcmc+iEYEEBEIAAYFAk1R
-yQYACgkQjRFFY3XAJMgEWwCggx4Gqlcrt76TSMlbU94cESo55AEAoJ3asQEMpe8t
-QUX+5aikw3z1AUoCiEoEEBECAAoFAkqf/3cDBQF4AAoJEPd/jbIxRL4PxyMAoKUI
-RPWlHCj/+HSFfwhos68wcSwmAKChuC00qutDro+AOo+uuq6YoHXj+ohgBBMRAgAg
-BQJKn/8bAhsDBgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQpdMvASZJpalDggCe
-KF9KOgOPdQbFnKXl8KtHory4EEwAnA7jxgorE6kk2QHEXFSF8LzOOH4GiGMEExEC
-ACMCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCSp//RgIZAQAKCRCl0y8BJkml
-qekFAKCRyt4+FoCzmBbRUUP3Cr8PzH++IgCgkno4vdjsWdyAey8e0KpITTXMFrmJ
-AhwEEAECAAYFAk0jQYEACgkQwHPSKH/7npsFfw/+P8B8hpM3+T1fgboBa4R32deu
-n8m6b8vZMXwuo/awQtMpzjem8JGXSUQm8iiX4hDtjq6ZoPrlN8T4jNmviBt/F5jI
-Jji/PYmhq+Zn9s++mfx+aF4IJrcHJWFkg/6kJzn4oSdl/YlvKf4VRCcQNtj4xV87
-GsdamnzU17XapLVMbSaVKh+6Af7ZLDerEH+iAq733HsYaTK+1xKmN7EFVXgS7bZ1
-9C4LTzc97bVHSywpT9yIrg9QQs/1kshfVIHDKyhjF6IwzSVbeGAIL3Oqo5zOMkWv
-7JlEIkkhTyl+FETxNMTMYjAk+Uei3kRodneq3YBF2uFYSEzrXQgHAyn37geiaMYj
-h8wu6a85nG1NS0SdxiZDIePmbvD9vWxFZUWYJ/h9ifsLivWcVXlvHoQ0emd+n2ai
-FhAck2xsuyHgnGIZMHww5IkQdu/TMqvbcR6d8Xulh+C4Tq7ppy+oTLADSBKII++p
-JQioYydRD529EUJgVlhyH27X6YAk3FuRD3zYZRYS2QECiKXvS665o3JRJ0ZSqNgv
-YOom8M0zz6bI9grnUoivMI4o7ISpE4ZwffEd37HVzmraaUHDXRhkulFSf1ImtXoj
-V9nNSM5p/+9eP7OioTZhSote6Vj6Ja1SZeRkXZK7BwqPbdO0VsYOb7G//ZiOlqs+
-paRr92G/pwBfj5Dq8EK5Ag0ERDtM9RAIAN0EJqBPvLN0tEin/y4Fe0R4n+E+zNXg
-bBsq4WidwyUFy3h/6u86FYvegXwUqVS2OsEs5MwPcCVJOfaEthF7I89QJnP9Nfx7
-V5I9yFB53o9ii38BN7X+9gSjpfwXOvf/wIDfggxX8/wRFel37GRB7TiiABRArBez
-s5x+zTXvT++WPhElySj0uY8bjVR6tso+d65K0UesvAa7PPWeRS+3nhqABSFLuTTT
-MMbnVXCGesBrYHlFVXClAYrSIOX8Ub/UnuEYs9+hIV7U4jKzRF9WJhIC1cXHPmOh
-vleAf/I9h/0KahD7HLYud40pNBo5tW8jSfp2/Q8TIE0xxshd51/xy4MAAwUH+wWn
-zsYVk981OKUEXul8JPyPxbw05fOd6gF4MJ3YodO+6dfoyIl3bewk+11KXZQALKaO
-1xmkAEO1RqizPeetoadBVkQBp5xPudsVElUTOX0pTYhkUd3iBilsCYKK1/KQ9KzD
-I+O/lRsm6L9lc6rV0IgPU00P4BAwR+x8Rw7TJFbuS0miR3lP1NSguz+/kpjxzmGP
-LyHJ+LVDYFkk6t0jPXhqFdUY6McUTBDEvavTGlVO062l9APTmmSMVFDsPN/rBes2
-rYhuuT+lDp+gcaS1UoaYCIm9kKOteQBnowX9V74Z+HKEYLtwILaSnNe6/fNSTvyj
-g0z+R+sPCY4nHewbVC+ISQQYEQIACQUCRDtM9QIbDAAKCRCl0y8BJkmlqbecAJ9B
-UdSKVg9H+fQNyP5sbOjj4RDtdACfXHrRHa2+XjJP0dhpvJ8IfvYnQsU=
-=fAJZ
------END PGP PUBLIC KEY BLOCK-----
diff --git a/docs/_api/surface.rst b/docs/_api/surface.rst
new file mode 100644
index 00000000..92ba28ab
--- /dev/null
+++ b/docs/_api/surface.rst
@@ -0,0 +1,6 @@
+==================
+Surface Transforms
+==================
+
+.. automodule:: nitransforms.surface
+ :members:
diff --git a/docs/api.rst b/docs/api.rst
index eb3c566b..a57d6836 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -10,5 +10,6 @@ Information on specific functions, classes, and methods for developers.
_api/linear
_api/manip
_api/nonlinear
+ _api/surface
_api/interp
_api/patched
diff --git a/docs/notebooks/Reading and Writing transforms.ipynb b/docs/notebooks/Reading and Writing transforms.ipynb
index 61ea269f..c0e85de2 100644
--- a/docs/notebooks/Reading and Writing transforms.ipynb
+++ b/docs/notebooks/Reading and Writing transforms.ipynb
@@ -46,6 +46,7 @@
"import numpy as np\n",
"import nibabel as nb\n",
"import nitransforms as nt\n",
+ "from nitransforms.resampling import apply\n",
"\n",
"cwd = TemporaryDirectory()\n",
"os.chdir(cwd.name)\n",
@@ -263,7 +264,7 @@
"metadata": {},
"outputs": [],
"source": [
- "moved = xfm.apply(nii, order=0)\n",
+ "moved = apply(xfm, nii, order=0)\n",
"moved.to_filename('moved-nb.nii.gz')"
]
},
@@ -741,7 +742,7 @@
"outputs": [],
"source": [
"xfm.reference = oblique\n",
- "moved_oblique = xfm.apply(las_anatomy)"
+ "moved_oblique = apply(xfm, las_anatomy)"
]
},
{
@@ -895,7 +896,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -909,9 +910,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.5"
+ "version": "3.11.8"
}
},
"nbformat": 4,
- "nbformat_minor": 2
+ "nbformat_minor": 4
}
diff --git a/docs/notebooks/isbi2020.ipynb b/docs/notebooks/isbi2020.ipynb
index 80442dbe..903d86e7 100644
--- a/docs/notebooks/isbi2020.ipynb
+++ b/docs/notebooks/isbi2020.ipynb
@@ -34,7 +34,8 @@
"from pathlib import Path\n",
"import nibabel as nb\n",
"from niworkflows.viz.notebook import display\n",
- "import nitransforms as nt"
+    "import nitransforms as nt\n",
+ "from nitransforms.resampling import apply"
]
},
{
@@ -161,7 +162,7 @@
}
],
"source": [
- "resampled_in_t1 = identity_xfm.apply(bold_nii)\n",
+ "resampled_in_t1 = apply(identity_xfm, bold_nii)\n",
"print(resampled_in_t1.affine)\n",
"print(resampled_in_t1.shape)"
]
@@ -180,7 +181,1695 @@
"outputs": [
{
"data": {
- "image/svg+xml": "",
+ "image/svg+xml": [
+ ""
+ ],
"text/plain": [
""
]
@@ -231,7 +1920,7 @@
}
],
"source": [
- "moved_to_t1 = t1w_to_bold_xfm.apply(bold_nii)\n",
+ "moved_to_t1 = apply(t1w_to_bold_xfm, bold_nii)\n",
"print(moved_to_t1.affine)\n",
"print(moved_to_t1.shape)"
]
@@ -243,7 +1932,1695 @@
"outputs": [
{
"data": {
- "image/svg+xml": "",
+ "image/svg+xml": [
+ ""
+ ],
"text/plain": [
""
]
@@ -280,7 +3657,1683 @@
"outputs": [
{
"data": {
- "image/svg+xml": "",
+ "image/svg+xml": [
+ ""
+ ],
"text/plain": [
""
]
@@ -290,7 +5343,7 @@
}
],
"source": [
- "display(bold_nii, bold_to_t1w_xfm.apply(t1w_nii))"
+ "display(bold_nii, apply(bold_to_t1w_xfm, t1w_nii))"
]
},
{
@@ -315,7 +5368,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -329,9 +5382,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.3"
+ "version": "3.11.8"
}
},
"nbformat": 4,
"nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/env.yml b/env.yml
new file mode 100644
index 00000000..d550959b
--- /dev/null
+++ b/env.yml
@@ -0,0 +1,44 @@
+name: nitransforms
+channels:
+ - https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/
+ - conda-forge
+# Update this ~yearly; last updated Jan 2024
+dependencies:
+ - python=3.11
+ # Needed for svgo and bids-validator; consider moving to deno
+ - nodejs=20
+ # Intel Math Kernel Library for numpy
+ - mkl=2023.2.0
+ - mkl-service=2.4.0
+ # git-annex for templateflow users with DataLad superdatasets
+ - git-annex=*=alldep*
+ # ANTs 2.5.3 is linked against libitk 5.4 - let's pin both there
+ - libitk=5.4
+ # Base scientific python stack; required by FSL, so pinned here
+ - numpy=1.26
+ - scipy=1.11
+ - matplotlib=3.8
+ - pandas=2.2
+ - h5py=3.10
+ # Dependencies compiled against numpy, best to stick with conda
+ - nitime=0.10
+ - scikit-image=0.22
+ - scikit-learn=1.4
+ # Utilities
+ - graphviz=9.0
+ - pandoc=3.1
+ # Workflow dependencies: ANTs
+ - ants=2.5.3
+ # Workflow dependencies: FSL (versions pinned in 6.0.7.7)
+ - fsl-bet2=2111.4
+ - fsl-flirt=2111.2
+ - fsl-fast4=2111.3
+ - fsl-fugue=2201.4
+ - fsl-mcflirt=2111.0
+ - fsl-miscmaths=2203.2
+ - fsl-topup=2203.2
+ # - pip
+ # - pip:
+ # - -r requirements.txt
+variables:
+ FSLOUTPUTTYPE: NIFTI_GZ
diff --git a/nitransforms/__init__.py b/nitransforms/__init__.py
index 1f819933..38768ae9 100644
--- a/nitransforms/__init__.py
+++ b/nitransforms/__init__.py
@@ -16,7 +16,7 @@
transform
"""
-from . import linear, manip, nonlinear
+from . import linear, manip, nonlinear, surface
from .linear import Affine, LinearTransformsMapping
from .nonlinear import DenseFieldTransform
from .manip import TransformChain
@@ -37,6 +37,7 @@
__copyright__ = "Copyright (c) 2021 The NiPy developers"
__all__ = [
+ "surface",
"linear",
"manip",
"nonlinear",
diff --git a/nitransforms/base.py b/nitransforms/base.py
index 25fd88e0..fa05f1f6 100644
--- a/nitransforms/base.py
+++ b/nitransforms/base.py
@@ -7,6 +7,7 @@
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Common interface for transforms."""
+
from pathlib import Path
import numpy as np
import h5py
@@ -15,7 +16,7 @@
from nibabel import funcs as _nbfuncs
from nibabel.nifti1 import intent_codes as INTENT_CODES
from nibabel.cifti2 import Cifti2Image
-from scipy import ndimage as ndi
+import nibabel as nb
EQUALITY_TOL = 1e-5
@@ -89,6 +90,76 @@ def shape(self):
return self._shape
+class SurfaceMesh(SampledSpatialData):
+ """Class to represent surface meshes."""
+
+ __slots__ = ["_triangles"]
+
+ def __init__(self, dataset):
+ """Create a sampling reference."""
+ self._shape = None
+
+ if isinstance(dataset, SurfaceMesh):
+ self._coords = dataset._coords
+ self._triangles = dataset._triangles
+ self._ndim = dataset._ndim
+ self._npoints = dataset._npoints
+ self._shape = dataset._shape
+ return
+
+ if isinstance(dataset, (str, Path)):
+ dataset = _nbload(str(dataset))
+
+ if hasattr(dataset, "numDA"): # Looks like a Gifti file
+ _das = dataset.get_arrays_from_intent(INTENT_CODES["pointset"])
+ if not _das:
+ raise TypeError(
+ "Input Gifti file does not contain reference coordinates."
+ )
+ self._coords = np.vstack([da.data for da in _das])
+ _tris = dataset.get_arrays_from_intent(INTENT_CODES["triangle"])
+ self._triangles = np.vstack([da.data for da in _tris])
+ self._npoints, self._ndim = self._coords.shape
+ self._shape = self._coords.shape
+ return
+
+ if isinstance(dataset, Cifti2Image):
+ raise NotImplementedError
+
+ raise ValueError("Dataset could not be interpreted as an irregular sample.")
+
+ def check_sphere(self, tolerance=1.001):
+ """Check sphericity of surface.
+ Based on https://github.com/Washington-University/workbench/blob/\
+7ba3345d161d567a4b628ceb02ab4471fc96cb20/src/Files/SurfaceResamplingHelper.cxx#L503
+ """
+ dists = np.linalg.norm(self._coords, axis=1)
+ return (dists.min() * tolerance) > dists.max()
+
+ def set_radius(self, radius=100):
+ if not self.check_sphere():
+ raise ValueError("You should only set the radius on spherical surfaces.")
+ dists = np.linalg.norm(self._coords, axis=1)
+ self._coords = self._coords * (radius / dists).reshape((-1, 1))
+
+ @classmethod
+ def from_arrays(cls, coordinates, triangles):
+ darrays = [
+ nb.gifti.GiftiDataArray(
+ coordinates.astype(np.float32),
+ intent=nb.nifti1.intent_codes["NIFTI_INTENT_POINTSET"],
+ datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_FLOAT32"],
+ ),
+ nb.gifti.GiftiDataArray(
+ triangles.astype(np.int32),
+ intent=nb.nifti1.intent_codes["NIFTI_INTENT_TRIANGLE"],
+ datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_INT32"],
+ ),
+ ]
+ gii = nb.gifti.GiftiImage(darrays=darrays)
+ return cls(gii)
+
+
class ImageGrid(SampledSpatialData):
"""Class to represent spaces of gridded data (images)."""
@@ -178,7 +249,10 @@ def __ne__(self, other):
class TransformBase:
"""Abstract image class to represent transforms."""
- __slots__ = ("_reference",)
+ __slots__ = (
+ "_reference",
+ "_ndim",
+ )
def __init__(self, reference=None):
"""Instantiate a transform."""
@@ -206,6 +280,22 @@ def __add__(self, b):
return TransformChain(transforms=[self, b])
+ def __len__(self):
+ """
+ Enable ``len()``.
+
+ By default, all transforms are of length one.
+ This must be overriden by transforms arrays and chains.
+
+ Example
+ -------
+ >>> T1 = TransformBase()
+ >>> len(T1)
+ 1
+
+ """
+ return 1
+
@property
def reference(self):
"""Access a reference space where data will be resampled onto."""
@@ -220,97 +310,7 @@ def reference(self, image):
@property
def ndim(self):
"""Access the dimensions of the reference space."""
- return self.reference.ndim
-
- def apply(
- self,
- spatialimage,
- reference=None,
- order=3,
- mode="constant",
- cval=0.0,
- prefilter=True,
- output_dtype=None,
- ):
- """
- Apply a transformation to an image, resampling on the reference spatial object.
-
- Parameters
- ----------
- spatialimage : `spatialimage`
- The image object containing the data to be resampled in reference
- space
- reference : spatial object, optional
- The image, surface, or combination thereof containing the coordinates
- of samples that will be sampled.
- order : int, optional
- The order of the spline interpolation, default is 3.
- The order has to be in the range 0-5.
- mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional
- Determines how the input image is extended when the resamplings overflows
- a border. Default is 'constant'.
- cval : float, optional
- Constant value for ``mode='constant'``. Default is 0.0.
- prefilter: bool, optional
- Determines if the image's data array is prefiltered with
- a spline filter before interpolation. The default is ``True``,
- which will create a temporary *float64* array of filtered values
- if *order > 1*. If setting this to ``False``, the output will be
- slightly blurred if *order > 1*, unless the input is prefiltered,
- i.e. it is the result of calling the spline filter on the original
- input.
-
- Returns
- -------
- resampled : `spatialimage` or ndarray
- The data imaged after resampling to reference space.
-
- """
- if reference is not None and isinstance(reference, (str, Path)):
- reference = _nbload(str(reference))
-
- _ref = (
- self.reference if reference is None else SpatialReference.factory(reference)
- )
-
- if _ref is None:
- raise TransformError("Cannot apply transform without reference")
-
- if isinstance(spatialimage, (str, Path)):
- spatialimage = _nbload(str(spatialimage))
-
- data = np.asanyarray(
- spatialimage.dataobj,
- dtype=spatialimage.get_data_dtype()
- )
- output_dtype = output_dtype or data.dtype
- targets = ImageGrid(spatialimage).index( # data should be an image
- _as_homogeneous(self.map(_ref.ndcoords.T), dim=_ref.ndim)
- )
-
- resampled = ndi.map_coordinates(
- data,
- targets.T,
- output=output_dtype,
- order=order,
- mode=mode,
- cval=cval,
- prefilter=prefilter,
- )
-
- if isinstance(_ref, ImageGrid): # If reference is grid, reshape
- hdr = None
- if _ref.header is not None:
- hdr = _ref.header.copy()
- hdr.set_data_dtype(output_dtype)
- moved = spatialimage.__class__(
- resampled.reshape(_ref.shape).astype(output_dtype),
- _ref.affine,
- hdr,
- )
- return moved
-
- return resampled
+ raise TypeError("TransformBase has no dimensions")
def map(self, x, inverse=False):
r"""
@@ -347,6 +347,17 @@ def _to_hdf5(self, x5_root):
"""Serialize this object into the x5 file format."""
raise NotImplementedError
+ def apply(self, *args, **kwargs):
+ """Apply the transform to a dataset.
+
+ Deprecated. Please use ``nitransforms.resampling.apply`` instead.
+ """
+ _msg = "This method is deprecated. Please use `nitransforms.resampling.apply` instead."
+ warnings.warn(_msg, DeprecationWarning, stacklevel=2)
+ from .resampling import apply
+
+ return apply(self, *args, **kwargs)
+
def _as_homogeneous(xyz, dtype="float32", dim=3):
"""
@@ -377,4 +388,8 @@ def _as_homogeneous(xyz, dtype="float32", dim=3):
def _apply_affine(x, affine, dim):
"""Get the image array's indexes corresponding to coordinates."""
- return affine.dot(_as_homogeneous(x, dim=dim).T)[:dim, ...].T
+ return np.tensordot(
+ affine,
+ _as_homogeneous(x, dim=dim).T,
+ axes=1,
+ )[:dim, ...]
diff --git a/nitransforms/cli.py b/nitransforms/cli.py
index 59c6b9d3..8f8f5ce0 100644
--- a/nitransforms/cli.py
+++ b/nitransforms/cli.py
@@ -5,6 +5,7 @@
from .linear import load as linload
from .nonlinear import load as nlinload
+from .resampling import apply
def cli_apply(pargs):
@@ -38,14 +39,15 @@ def cli_apply(pargs):
# ensure a reference is set
xfm.reference = pargs.ref or pargs.moving
- moved = xfm.apply(
+ moved = apply(
+ xfm,
pargs.moving,
order=pargs.order,
mode=pargs.mode,
cval=pargs.cval,
prefilter=pargs.prefilter,
)
- moved.to_filename(pargs.out or "nt_{}".format(os.path.basename(pargs.moving)))
+ moved.to_filename(pargs.out or f"nt_{os.path.basename(pargs.moving)}")
def get_parser():
diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py
index b7fc657b..7c66d434 100644
--- a/nitransforms/io/afni.py
+++ b/nitransforms/io/afni.py
@@ -130,9 +130,17 @@ class AFNILinearTransformArray(BaseLinearTransformList):
def to_ras(self, moving=None, reference=None):
"""Return a nitransforms' internal RAS matrix."""
- return np.stack(
- [xfm.to_ras(moving=moving, reference=reference) for xfm in self.xforms]
- )
+
+ pre_rotation = post_rotation = np.eye(4)
+ if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine):
+ pre_rotation = _cardinal_rotation(ref_aff, True)
+ if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine):
+ post_rotation = _cardinal_rotation(mov_aff, False)
+
+ return np.stack([
+ post_rotation @ (xfm.to_ras() @ pre_rotation)
+ for xfm in self.xforms
+ ])
def to_string(self):
"""Convert to a string directly writeable to file."""
@@ -144,14 +152,22 @@ def to_string(self):
if line.strip()
]
strings += lines
- return "\n".join(strings)
+ return "\n".join(strings + [""])
@classmethod
def from_ras(cls, ras, moving=None, reference=None):
"""Create an ITK affine from a nitransform's RAS+ matrix."""
_self = cls()
+
+ pre_rotation = post_rotation = np.eye(4)
+
+ if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine):
+ pre_rotation = _cardinal_rotation(ref_aff, False)
+ if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine):
+ post_rotation = _cardinal_rotation(mov_aff, True)
+
_self.xforms = [
- cls._inner_type.from_ras(ras[i, ...], moving=moving, reference=reference)
+ cls._inner_type.from_ras(post_rotation @ ras[i, ...] @ pre_rotation)
for i in range(ras.shape[0])
]
return _self
@@ -193,6 +209,17 @@ def from_image(cls, imgobj):
return imgobj.__class__(field, imgobj.affine, hdr)
+ @classmethod
+ def to_image(cls, imgobj):
+ """Export a displacements field from a nibabel object."""
+
+ hdr = imgobj.header.copy()
+
+ warp_data = imgobj.get_fdata().reshape(imgobj.shape[:3] + (1, imgobj.shape[-1]))
+ warp_data[..., (0, 1)] *= -1
+
+ return imgobj.__class__(warp_data, imgobj.affine, hdr)
+
def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG):
"""
@@ -210,7 +237,7 @@ def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG):
True
"""
- return (obliquity(affine).min() * 180 / pi) > thres
+ return float(obliquity(affine).max() * 180 / pi) > thres
def _afni_deobliqued_grid(oblique, shape):
diff --git a/nitransforms/io/base.py b/nitransforms/io/base.py
index 6d1a7c8e..3c923426 100644
--- a/nitransforms/io/base.py
+++ b/nitransforms/io/base.py
@@ -76,12 +76,12 @@ class LinearParameters(LinearTransformStruct):
Examples
--------
>>> lp = LinearParameters()
- >>> np.all(lp.structarr['parameters'] == np.eye(4))
+ >>> np.array_equal(lp.structarr['parameters'], np.eye(4))
True
>>> p = np.diag([2., 2., 2., 1.])
>>> lp = LinearParameters(p)
- >>> np.all(lp.structarr['parameters'] == p)
+ >>> np.array_equal(lp.structarr['parameters'], p)
True
"""
@@ -146,6 +146,17 @@ def from_image(cls, imgobj):
"""Import a displacements field from a nibabel image object."""
raise NotImplementedError
+ @classmethod
+ def to_filename(cls, img, filename):
+ """Export a displacements field to a NIfTI file."""
+ imgobj = cls.to_image(img)
+ imgobj.to_filename(filename)
+
+ @classmethod
+ def to_image(cls, imgobj):
+ """Export a displacements field image from a nitransforms image object."""
+ raise NotImplementedError
+
def _ensure_image(img):
if isinstance(img, (str, Path)):
diff --git a/nitransforms/io/fsl.py b/nitransforms/io/fsl.py
index 8e4c8264..f454227e 100644
--- a/nitransforms/io/fsl.py
+++ b/nitransforms/io/fsl.py
@@ -190,6 +190,17 @@ def from_image(cls, imgobj):
return imgobj.__class__(field, imgobj.affine, hdr)
+ @classmethod
+ def to_image(cls, imgobj):
+ """Export a displacements field from a nibabel object."""
+
+ hdr = imgobj.header.copy()
+
+ warp_data = imgobj.get_fdata()
+ warp_data[..., 0] *= -1
+
+ return imgobj.__class__(warp_data, imgobj.affine, hdr)
+
def _fsl_aff_adapt(space):
"""
diff --git a/nitransforms/io/itk.py b/nitransforms/io/itk.py
index d7a093eb..afabfd98 100644
--- a/nitransforms/io/itk.py
+++ b/nitransforms/io/itk.py
@@ -352,6 +352,18 @@ def from_image(cls, imgobj):
return imgobj.__class__(field, imgobj.affine, hdr)
+ @classmethod
+ def to_image(cls, imgobj):
+ """Export a displacements field from a nibabel object."""
+
+ hdr = imgobj.header.copy()
+ hdr.set_intent("vector")
+
+ warp_data = imgobj.get_fdata().reshape(imgobj.shape[:3] + (1, imgobj.shape[-1]))
+ warp_data[..., (0, 1)] *= -1
+
+ return imgobj.__class__(warp_data, imgobj.affine, hdr)
+
class ITKCompositeH5:
"""A data structure for ITK's HDF5 files."""
@@ -391,14 +403,19 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False):
if xfm["TransformType"][0].startswith(b"DisplacementFieldTransform"):
if only_linear:
continue
- _fixed = np.asanyarray(xfm[f"{typo_fallback}FixedParameters"])
- shape = _fixed[:3].astype("uint16").tolist()
- offset = _fixed[3:6].astype("float")
- zooms = _fixed[6:9].astype("float")
- directions = _fixed[9:].astype("float").reshape((3, 3))
+ _fixed = xfm[f"{typo_fallback}FixedParameters"]
+ shape = _fixed[:3]
+ offset = _fixed[3:6]
+ zooms = _fixed[6:9]
+ directions = np.reshape(_fixed[9:], (3, 3))
affine = from_matvec(directions * zooms, offset)
- field = np.asanyarray(xfm[f"{typo_fallback}Parameters"]).reshape(
- (*shape, 1, -1)
+ # ITK uses Fortran ordering, like NIfTI, but with the vector dimension first
+ field = np.moveaxis(
+ np.reshape(
+ xfm[f"{typo_fallback}Parameters"], (3, *shape.astype(int)), order='F'
+ ),
+ 0,
+ -1,
)
field[..., (0, 1)] *= -1.0
hdr = Nifti1Header()
@@ -406,7 +423,7 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False):
hdr.set_data_dtype("float")
xfm_list.append(
- Nifti1Image(field.astype("float"), LPS @ affine @ LPS, hdr)
+ Nifti1Image(field.astype("float"), LPS @ affine, hdr)
)
continue
diff --git a/nitransforms/io/lta.py b/nitransforms/io/lta.py
index 31271154..334266bb 100644
--- a/nitransforms/io/lta.py
+++ b/nitransforms/io/lta.py
@@ -176,7 +176,7 @@ def set_type(self, new_type):
return
raise NotImplementedError(
- "Converting {0} to {1} is not yet available".format(
+ "Converting {} to {} is not yet available".format(
transform_codes.label[current], transform_codes.label[new_type]
)
)
@@ -334,7 +334,7 @@ def to_string(self):
code = int(self["type"])
header = [
"# LTA-array file created by NiTransforms",
- "type = {} # {}".format(code, transform_codes.label[code]),
+ f"type = {code} # {transform_codes.label[code]}",
"nxforms = {}".format(self["nxforms"]),
]
xforms = [xfm.to_string(partial=True) for xfm in self._xforms]
diff --git a/nitransforms/linear.py b/nitransforms/linear.py
index 9c430d3b..71df6a16 100644
--- a/nitransforms/linear.py
+++ b/nitransforms/linear.py
@@ -10,15 +10,12 @@
import warnings
import numpy as np
from pathlib import Path
-from scipy import ndimage as ndi
-from nibabel.loadsave import load as _nbload
from nibabel.affines import from_matvec
from nitransforms.base import (
ImageGrid,
TransformBase,
- SpatialReference,
_as_homogeneous,
EQUALITY_TOL,
)
@@ -112,6 +109,10 @@ def __invert__(self):
"""
return self.__class__(self._inverse)
+ def __len__(self):
+ """Enable using len()."""
+ return 1 if self._matrix.ndim == 2 else len(self._matrix)
+
def __matmul__(self, b):
"""
Compose two Affines.
@@ -142,6 +143,11 @@ def matrix(self):
"""Access the internal representation of this affine."""
return self._matrix
+ @property
+ def ndim(self):
+ """Access the internal representation of this affine."""
+ return self._matrix.ndim + 1
+
def map(self, x, inverse=False):
r"""
Apply :math:`y = f(x)`.
@@ -216,14 +222,13 @@ def from_filename(cls, filename, fmt=None, reference=None, moving=None):
is_array = cls != Affine
errors = []
for potential_fmt in fmtlist:
- if (potential_fmt == "itk" and Path(filename).suffix == ".mat"):
+ if potential_fmt == "itk" and Path(filename).suffix == ".mat":
is_array = False
cls = Affine
try:
struct = get_linear_factory(
- potential_fmt,
- is_array=is_array
+ potential_fmt, is_array=is_array
).from_filename(filename)
except (TransformFileError, FileNotFoundError) as err:
errors.append((potential_fmt, err))
@@ -316,14 +321,15 @@ def __init__(self, transforms, reference=None):
)
self._inverse = np.linalg.inv(self._matrix)
+ def __iter__(self):
+ """Enable iterating over the series of transforms."""
+ for _m in self.matrix:
+ yield Affine(_m, reference=self._reference)
+
def __getitem__(self, i):
"""Enable indexed access to the series of matrices."""
return Affine(self.matrix[i, ...], reference=self._reference)
- def __len__(self):
- """Enable using len()."""
- return len(self._matrix)
-
def map(self, x, inverse=False):
r"""
Apply :math:`y = f(x)`.
@@ -392,111 +398,6 @@ def to_filename(self, filename, fmt="X5", moving=None):
).to_filename(filename)
return filename
- def apply(
- self,
- spatialimage,
- reference=None,
- order=3,
- mode="constant",
- cval=0.0,
- prefilter=True,
- output_dtype=None,
- ):
- """
- Apply a transformation to an image, resampling on the reference spatial object.
-
- Parameters
- ----------
- spatialimage : `spatialimage`
- The image object containing the data to be resampled in reference
- space
- reference : spatial object, optional
- The image, surface, or combination thereof containing the coordinates
- of samples that will be sampled.
- order : int, optional
- The order of the spline interpolation, default is 3.
- The order has to be in the range 0-5.
- mode : {"constant", "reflect", "nearest", "mirror", "wrap"}, optional
- Determines how the input image is extended when the resamplings overflows
- a border. Default is "constant".
- cval : float, optional
- Constant value for ``mode="constant"``. Default is 0.0.
- prefilter: bool, optional
- Determines if the image's data array is prefiltered with
- a spline filter before interpolation. The default is ``True``,
- which will create a temporary *float64* array of filtered values
- if *order > 1*. If setting this to ``False``, the output will be
- slightly blurred if *order > 1*, unless the input is prefiltered,
- i.e. it is the result of calling the spline filter on the original
- input.
-
- Returns
- -------
- resampled : `spatialimage` or ndarray
- The data imaged after resampling to reference space.
-
- """
- if reference is not None and isinstance(reference, (str, Path)):
- reference = _nbload(str(reference))
-
- _ref = (
- self.reference if reference is None else SpatialReference.factory(reference)
- )
-
- if isinstance(spatialimage, (str, Path)):
- spatialimage = _nbload(str(spatialimage))
-
- data = np.squeeze(np.asanyarray(spatialimage.dataobj))
- output_dtype = output_dtype or data.dtype
-
- ycoords = self.map(_ref.ndcoords.T)
- targets = ImageGrid(spatialimage).index( # data should be an image
- _as_homogeneous(np.vstack(ycoords), dim=_ref.ndim)
- )
-
- if data.ndim == 4:
- if len(self) != data.shape[-1]:
- raise ValueError(
- "Attempting to apply %d transforms on a file with "
- "%d timepoints" % (len(self), data.shape[-1])
- )
- targets = targets.reshape((len(self), -1, targets.shape[-1]))
- resampled = np.stack(
- [
- ndi.map_coordinates(
- data[..., t],
- targets[t, ..., : _ref.ndim].T,
- output=output_dtype,
- order=order,
- mode=mode,
- cval=cval,
- prefilter=prefilter,
- )
- for t in range(data.shape[-1])
- ],
- axis=0,
- )
- elif data.ndim in (2, 3):
- resampled = ndi.map_coordinates(
- data,
- targets[..., : _ref.ndim].T,
- output=output_dtype,
- order=order,
- mode=mode,
- cval=cval,
- prefilter=prefilter,
- )
-
- if isinstance(_ref, ImageGrid): # If reference is grid, reshape
- newdata = resampled.reshape((len(self), *_ref.shape))
- moved = spatialimage.__class__(
- np.moveaxis(newdata, 0, -1), _ref.affine, spatialimage.header
- )
- moved.header.set_data_dtype(output_dtype)
- return moved
-
- return resampled
-
def load(filename, fmt=None, reference=None, moving=None):
"""
diff --git a/nitransforms/manip.py b/nitransforms/manip.py
index 233f5adf..b30fd646 100644
--- a/nitransforms/manip.py
+++ b/nitransforms/manip.py
@@ -67,6 +67,11 @@ def __len__(self):
"""Enable using len()."""
return len(self.transforms)
+ @property
+ def ndim(self):
+ """Get the number of dimensions."""
+ return max(x.ndim for x in self._transforms)
+
@property
def transforms(self):
"""Get the internal list of transforms."""
diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py
index c0cdc92e..9c29c53c 100644
--- a/nitransforms/nonlinear.py
+++ b/nitransforms/nonlinear.py
@@ -18,7 +18,6 @@
TransformBase,
TransformError,
ImageGrid,
- SpatialReference,
_as_homogeneous,
)
from scipy.ndimage import map_coordinates
@@ -71,21 +70,18 @@ def __init__(self, field=None, is_deltas=True, reference=None):
is_deltas = True
try:
- self.reference = ImageGrid(
- reference if reference is not None else field
- )
+ self.reference = ImageGrid(reference if reference is not None else field)
except AttributeError:
raise TransformError(
"Field must be a spatial image if reference is not provided"
- if reference is None else
- "Reference is not a spatial image"
+ if reference is None
+ else "Reference is not a spatial image"
)
- ndim = self._field.ndim - 1
- if self._field.shape[-1] != ndim:
+ if self._field.shape[-1] != self.ndim:
raise TransformError(
"The number of components of the field (%d) does not match "
- "the number of dimensions (%d)" % (self._field.shape[-1], ndim)
+ "the number of dimensions (%d)" % (self._field.shape[-1], self.ndim)
)
if is_deltas:
@@ -98,6 +94,11 @@ def __repr__(self):
"""Beautify the python representation."""
return f"<{self.__class__.__name__}[{self._field.shape[-1]}D] {self._field.shape[:3]}>"
+ @property
+ def ndim(self):
+ """Get the dimensions of the transform."""
+ return self._field.ndim - 1
+
def map(self, x, inverse=False):
r"""
Apply the transformation to a list of physical coordinate points.
@@ -158,23 +159,31 @@ def map(self, x, inverse=False):
if inverse is True:
raise NotImplementedError
+
ijk = self.reference.index(x)
indexes = np.round(ijk).astype("int")
if np.all(np.abs(ijk - indexes) < 1e-3):
- indexes = tuple(tuple(i) for i in indexes.T)
+ indexes = tuple(tuple(i) for i in indexes)
return self._field[indexes]
- return np.vstack(tuple(
- map_coordinates(
- self._field[..., i],
- ijk.T,
- order=3,
- mode="constant",
- cval=0,
- prefilter=True,
- ) for i in range(self.reference.ndim)
- )).T
+ new_map = np.vstack(
+ tuple(
+ map_coordinates(
+ self._field[..., i],
+ ijk,
+ order=3,
+ mode="constant",
+ cval=np.nan,
+ prefilter=True,
+ )
+ for i in range(self.reference.ndim)
+ )
+ ).T
+
+ # Set NaN values back to the original coordinates value = no displacement
+ new_map[np.isnan(new_map)] = np.array(x)[np.isnan(new_map)]
+ return new_map
def __matmul__(self, b):
"""
@@ -196,9 +205,9 @@ def __matmul__(self, b):
True
"""
- retval = b.map(
- self._field.reshape((-1, self._field.shape[-1]))
- ).reshape(self._field.shape)
+ retval = b.map(self._field.reshape((-1, self._field.shape[-1]))).reshape(
+ self._field.shape
+ )
return DenseFieldTransform(retval, is_deltas=False, reference=self.reference)
def __eq__(self, other):
@@ -237,11 +246,11 @@ def from_filename(cls, filename, fmt="X5"):
class BSplineFieldTransform(TransformBase):
"""Represent a nonlinear transform parameterized by BSpline basis."""
- __slots__ = ['_coeffs', '_knots', '_weights', '_order', '_moving']
+ __slots__ = ["_coeffs", "_knots", "_weights", "_order", "_moving"]
def __init__(self, coefficients, reference=None, order=3):
"""Create a smooth deformation field using B-Spline basis."""
- super(BSplineFieldTransform, self).__init__()
+ super().__init__()
self._order = order
coefficients = _ensure_image(coefficients)
@@ -252,28 +261,31 @@ def __init__(self, coefficients, reference=None, order=3):
if reference is not None:
self.reference = reference
- if coefficients.shape[-1] != self.ndim:
+ if coefficients.shape[-1] != self.reference.ndim:
raise TransformError(
- 'Number of components of the coefficients does '
- 'not match the number of dimensions')
+ "Number of components of the coefficients does "
+ "not match the number of dimensions"
+ )
+
+ @property
+ def ndim(self):
+ """Get the dimensions of the transform."""
+ return self._coeffs.ndim - 1
def to_field(self, reference=None, dtype="float32"):
"""Generate a displacements deformation field from this B-Spline field."""
_ref = (
- self.reference if reference is None else
- ImageGrid(_ensure_image(reference))
+ self.reference if reference is None else ImageGrid(_ensure_image(reference))
)
if _ref is None:
raise TransformError("A reference must be defined")
- ndim = self._coeffs.shape[-1]
-
if self._weights is None:
self._weights = grid_bspline_weights(_ref, self._knots)
- field = np.zeros((_ref.npoints, ndim))
+ field = np.zeros((_ref.npoints, self.ndim))
- for d in range(ndim):
+ for d in range(self.ndim):
# 1 x Nvox : (1 x K) @ (K x Nvox)
field[:, d] = self._coeffs[..., d].reshape(-1) @ self._weights
@@ -281,47 +293,6 @@ def to_field(self, reference=None, dtype="float32"):
field.astype(dtype).reshape(*_ref.shape, -1), reference=_ref
)
- def apply(
- self,
- spatialimage,
- reference=None,
- order=3,
- mode="constant",
- cval=0.0,
- prefilter=True,
- output_dtype=None,
- ):
- """Apply a B-Spline transform on input data."""
-
- _ref = (
- self.reference if reference is None else
- SpatialReference.factory(_ensure_image(reference))
- )
- spatialimage = _ensure_image(spatialimage)
-
- # If locations to be interpolated are not on a grid, run map()
- if not isinstance(_ref, ImageGrid):
- return super().apply(
- spatialimage,
- reference=_ref,
- order=order,
- mode=mode,
- cval=cval,
- prefilter=prefilter,
- output_dtype=output_dtype,
- )
-
- # If locations to be interpolated are on a grid, generate a displacements field
- return self.to_field(reference=reference).apply(
- spatialimage,
- reference=reference,
- order=order,
- mode=mode,
- cval=cval,
- prefilter=prefilter,
- output_dtype=output_dtype,
- )
-
def map(self, x, inverse=False):
r"""
Apply the transformation to a list of physical coordinate points.
@@ -346,11 +317,11 @@ def map(self, x, inverse=False):
--------
>>> xfm = BSplineFieldTransform(test_dir / "someones_bspline_coefficients.nii.gz")
>>> xfm.reference = test_dir / "someones_anatomy.nii.gz"
- >>> xfm.map([-6.5, -36., -19.5]).tolist()
- [[-6.5, -31.476097418406784, -19.5]]
+ >>> xfm.map([-6.5, -36., -19.5]).tolist() # doctest: +ELLIPSIS
+ [[-6.5, -31.476097418406..., -19.5]]
- >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist()
- [[-6.5, -31.476097418406784, -19.5], [-1.0, -3.8072675377121996, -11.25]]
+ >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist() # doctest: +ELLIPSIS
+ [[-6.5, -31.4760974184..., -19.5], [-1.0, -3.807267537712..., -11.25]]
"""
vfunc = partial(
@@ -372,9 +343,9 @@ def _map_xyz(x, reference, knots, coeffs):
# Probably this will change if the order of the B-Spline is different
w_start, w_end = np.ceil(ijk - 2).astype(int), np.floor(ijk + 2).astype(int)
# Generate a grid of indexes corresponding to the window
- nonzero_knots = tuple([
- np.arange(start, end + 1) for start, end in zip(w_start, w_end)
- ])
+ nonzero_knots = tuple(
+ [np.arange(start, end + 1) for start, end in zip(w_start, w_end)]
+ )
nonzero_knots = tuple(np.meshgrid(*nonzero_knots, indexing="ij"))
window = np.array(nonzero_knots).reshape((ndim, -1))
diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py
new file mode 100644
index 00000000..d7c7f9c5
--- /dev/null
+++ b/nitransforms/resampling.py
@@ -0,0 +1,366 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+"""Resampling utilities."""
+
+import asyncio
+from os import cpu_count
+from functools import partial
+from pathlib import Path
+from typing import Callable, TypeVar
+
+import numpy as np
+from nibabel.loadsave import load as _nbload
+from nibabel.arrayproxy import get_obj_dtype
+from nibabel.spatialimages import SpatialImage
+from scipy import ndimage as ndi
+
+from nitransforms.base import (
+ ImageGrid,
+ TransformBase,
+ TransformError,
+ SpatialReference,
+ _as_homogeneous,
+)
+
+R = TypeVar("R")
+
+SERIALIZE_VOLUME_WINDOW_WIDTH: int = 8
+"""Minimum number of volumes to automatically serialize 4D transforms."""
+
+
+async def worker(job: Callable[[], R], semaphore) -> R:
+ async with semaphore:
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(None, job)
+
+
+async def _apply_serial(
+ data: np.ndarray,
+ spatialimage: SpatialImage,
+ targets: np.ndarray,
+ transform: TransformBase,
+ ref_ndim: int,
+ ref_ndcoords: np.ndarray,
+ n_resamplings: int,
+ output: np.ndarray,
+ input_dtype: np.dtype,
+ order: int = 3,
+ mode: str = "constant",
+ cval: float = 0.0,
+ prefilter: bool = True,
+ max_concurrent: int = min(cpu_count(), 12),
+):
+ """
+ Resample through a given transform serially, in a 3D+t setting.
+
+ Parameters
+ ----------
+ data : :obj:`~numpy.ndarray`
+ The input data array.
+ spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike`
+ The image object containing the data to be resampled in reference
+ space
+ targets : :obj:`~numpy.ndarray`
+ The target coordinates for mapping.
+ transform : :obj:`~nitransforms.base.TransformBase`
+ The 3D, 3D+t, or 4D transform through which data will be resampled.
+ ref_ndim : :obj:`int`
+ Dimensionality of the resampling target (reference image).
+ ref_ndcoords : :obj:`~numpy.ndarray`
+ Physical coordinates (RAS+) where data will be interpolated, if the resampling
+ target is a grid, the scanner coordinates of all voxels.
+ n_resamplings : :obj:`int`
+ Total number of 3D resamplings (can be defined by the input image, the transform,
+ or be matched, that is, same number of volumes in the input and number of transforms).
+ output : :obj:`~numpy.ndarray`
+ The output data array where resampled values will be stored volume-by-volume.
+ order : :obj:`int`, optional
+ The order of the spline interpolation, default is 3.
+ The order has to be in the range 0-5.
+ mode : :obj:`str`, optional
+ Determines how the input image is extended when the resamplings overflows
+ a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``,
+ or ``'wrap'``. Default is ``'constant'``.
+ cval : :obj:`float`, optional
+ Constant value for ``mode='constant'``. Default is 0.0.
+ prefilter: :obj:`bool`, optional
+ Determines if the image's data array is prefiltered with
+ a spline filter before interpolation. The default is ``True``,
+ which will create a temporary *float64* array of filtered values
+ if *order > 1*. If setting this to ``False``, the output will be
+ slightly blurred if *order > 1*, unless the input is prefiltered,
+ i.e. it is the result of calling the spline filter on the original
+ input.
+
+ Returns
+ -------
+ np.ndarray
+ Data resampled on the 3D+t array of input coordinates.
+
+ """
+ tasks = []
+ semaphore = asyncio.Semaphore(max_concurrent)
+
+ for t in range(n_resamplings):
+ xfm_t = transform if n_resamplings == 1 else transform[t]
+
+ if targets is None:
+ targets = ImageGrid(spatialimage).index( # data should be an image
+ _as_homogeneous(xfm_t.map(ref_ndcoords), dim=ref_ndim)
+ )
+
+ data_t = (
+ data
+ if data is not None
+ else spatialimage.dataobj[..., t].astype(input_dtype, copy=False)
+ )
+
+ tasks.append(
+ asyncio.create_task(
+ worker(
+ partial(
+ ndi.map_coordinates,
+ data_t,
+ targets,
+ output=output[..., t],
+ order=order,
+ mode=mode,
+ cval=cval,
+ prefilter=prefilter,
+ ),
+ semaphore,
+ )
+ )
+ )
+ await asyncio.gather(*tasks)
+ return output
+
+
+def apply(
+ transform: TransformBase,
+ spatialimage: str | Path | SpatialImage,
+ reference: str | Path | SpatialImage = None,
+ order: int = 3,
+ mode: str = "constant",
+ cval: float = 0.0,
+ prefilter: bool = True,
+ output_dtype: np.dtype = None,
+ dtype_width: int = 8,
+ serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH,
+ max_concurrent: int = min(cpu_count(), 12),
+) -> SpatialImage | np.ndarray:
+ """
+ Apply a transformation to an image, resampling on the reference spatial object.
+
+ Parameters
+ ----------
+ transform: :obj:`~nitransforms.base.TransformBase`
+ The 3D, 3D+t, or 4D transform through which data will be resampled.
+ spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike`
+ The image object containing the data to be resampled in reference
+ space
+ reference : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike`
+ The image, surface, or combination thereof containing the coordinates
+ of samples that will be sampled.
+ order : :obj:`int`, optional
+ The order of the spline interpolation, default is 3.
+ The order has to be in the range 0-5.
+ mode : :obj:`str`, optional
+ Determines how the input image is extended when the resamplings overflows
+ a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``,
+ or ``'wrap'``. Default is ``'constant'``.
+ cval : :obj:`float`, optional
+ Constant value for ``mode='constant'``. Default is 0.0.
+ prefilter : :obj:`bool`, optional
+ Determines if the image's data array is prefiltered with
+ a spline filter before interpolation. The default is ``True``,
+ which will create a temporary *float64* array of filtered values
+ if *order > 1*. If setting this to ``False``, the output will be
+ slightly blurred if *order > 1*, unless the input is prefiltered,
+ i.e. it is the result of calling the spline filter on the original
+ input.
+ output_dtype : :obj:`~numpy.dtype`, optional
+ The dtype of the returned array or image, if specified.
+ If ``None``, the default behavior is to use the effective dtype of
+ the input image. If slope and/or intercept are defined, the effective
+ dtype is float64, otherwise it is equivalent to the input image's
+ ``get_data_dtype()`` (on-disk type).
+ If ``reference`` is defined, then the return value is an image, with
+ a data array of the effective dtype but with the on-disk dtype set to
+ the input image's on-disk dtype.
+ dtype_width : :obj:`int`
+ Cap the width of the input data type to the given number of bytes.
+ This argument is intended to work as a way to implement lower memory
+ requirements in resampling.
+ serialize_nvols : :obj:`int`
+ Minimum number of volumes in a 3D+t (that is, a series of 3D transformations
+ independent in time) to resample on a one-by-one basis.
+ Serialized resampling can be executed concurrently (parallelized) with
+ the argument ``max_concurrent``.
+ max_concurrent : :obj:`int`
+ Maximum number of 3D resamplings to be executed concurrently.
+
+ Returns
+ -------
+ resampled : :obj:`~nibabel.spatialimages.SpatialImage` or :obj:`~numpy.ndarray`
+ The data imaged after resampling to reference space.
+
+ """
+ if reference is not None and isinstance(reference, (str, Path)):
+ reference = _nbload(str(reference))
+
+ _ref = (
+ transform.reference
+ if reference is None
+ else SpatialReference.factory(reference)
+ )
+
+ if _ref is None:
+ raise TransformError("Cannot apply transform without reference")
+
+ if isinstance(spatialimage, (str, Path)):
+ spatialimage = _nbload(str(spatialimage))
+
+ # Avoid opening the data array just yet
+ input_dtype = cap_dtype(get_obj_dtype(spatialimage.dataobj), dtype_width)
+
+ # Number of data volumes
+ data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1]
+ # Number of transforms: transforms chains (e.g., affine + field, are a single transform)
+ xfm_nvols = 1 if transform.ndim < 4 else len(transform)
+
+ if data_nvols != xfm_nvols and min(data_nvols, xfm_nvols) > 1:
+ raise ValueError(
+ "The fourth dimension of the data does not match the transform's shape."
+ )
+
+ serialize_nvols = (
+ serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf
+ )
+ n_resamplings = max(data_nvols, xfm_nvols)
+ serialize_4d = n_resamplings >= serialize_nvols
+
+ targets = None
+ ref_ndcoords = _ref.ndcoords.T
+ if hasattr(transform, "to_field") and callable(transform.to_field):
+ targets = ImageGrid(spatialimage).index(
+ _as_homogeneous(
+ transform.to_field(reference=reference).map(ref_ndcoords),
+ dim=_ref.ndim,
+ )
+ )
+ elif xfm_nvols == 1:
+ targets = ImageGrid(spatialimage).index( # data should be an image
+ _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim)
+ )
+
+ if serialize_4d:
+ data = (
+ np.asanyarray(spatialimage.dataobj, dtype=input_dtype)
+ if data_nvols == 1
+ else None
+ )
+
+ # Order F ensures individual volumes are contiguous in memory
+ # Also matches NIfTI, making final save more efficient
+ resampled = np.zeros(
+ (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F"
+ )
+
+ resampled = asyncio.run(
+ _apply_serial(
+ data,
+ spatialimage,
+ targets,
+ transform,
+ _ref.ndim,
+ ref_ndcoords,
+ n_resamplings,
+ resampled,
+ input_dtype,
+ order=order,
+ mode=mode,
+ cval=cval,
+ prefilter=prefilter,
+ max_concurrent=max_concurrent,
+ )
+ )
+ else:
+ data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype)
+
+ if targets is None:
+ targets = ImageGrid(spatialimage).index( # data should be an image
+ _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim)
+ )
+
+ # Cast 3D data into 4D if 4D nonsequential transform
+ if data_nvols == 1 and xfm_nvols > 1:
+ data = data[..., np.newaxis]
+
+ if transform.ndim == 4:
+ targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T
+
+ resampled = ndi.map_coordinates(
+ data,
+ targets,
+ order=order,
+ mode=mode,
+ cval=cval,
+ prefilter=prefilter,
+ )
+
+ if isinstance(_ref, ImageGrid): # If reference is grid, reshape
+ hdr = (
+ _ref.header.copy()
+ if _ref.header is not None
+ else spatialimage.header.__class__()
+ )
+ hdr.set_data_dtype(output_dtype or spatialimage.header.get_data_dtype())
+
+ moved = spatialimage.__class__(
+ resampled.reshape(_ref.shape if n_resamplings == 1 else _ref.shape + (-1,)),
+ _ref.affine,
+ hdr,
+ )
+ return moved
+
+ output_dtype = output_dtype or input_dtype
+ return resampled.astype(output_dtype)
+
+
+def cap_dtype(dt, nbytes):
+ """
+ Cap the datatype size to shave off memory requirements.
+
+ Examples
+ --------
+ >>> cap_dtype(np.dtype('f8'), 4)
+ dtype('float32')
+
+ >>> cap_dtype(np.dtype('f8'), 16)
+ dtype('float64')
+
+ >>> cap_dtype('float64', 4)
+ dtype('float32')
+
+ >>> cap_dtype(np.dtype('i1'), 4)
+ dtype('int8')
+
+ >>> cap_dtype('int8', 4)
+ dtype('int8')
+
+ >>> cap_dtype('int32', 1)
+ dtype('int8')
+
+ >>> cap_dtype(np.dtype('i8'), 4)
+ dtype('int32')
+
+ """
+ dt = np.dtype(dt)
+ return np.dtype(f"{dt.byteorder}{dt.kind}{min(nbytes, dt.itemsize)}")
diff --git a/nitransforms/surface.py b/nitransforms/surface.py
new file mode 100644
index 00000000..7e1e7116
--- /dev/null
+++ b/nitransforms/surface.py
@@ -0,0 +1,652 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+"""Surface transforms."""
+import pathlib
+import warnings
+import h5py
+import numpy as np
+import nibabel as nb
+from scipy import sparse
+from scipy.spatial import KDTree
+from scipy.spatial.distance import cdist
+from nitransforms.base import (
+ SurfaceMesh
+)
+
+
+class SurfaceTransformBase():
+ """Generic surface transformation class"""
+
+ def __init__(self, reference, moving, spherical=False):
+ """Instantiate a generic surface transform."""
+ if spherical:
+ if not reference.check_sphere():
+ raise ValueError("reference was not spherical")
+ if not moving.check_sphere():
+ raise ValueError("moving was not spherical")
+ reference.set_radius()
+ moving.set_radius()
+ self._reference = reference
+ self._moving = moving
+
+ def __eq__(self, other):
+ ref_coords_eq = np.all(self.reference._coords == other.reference._coords)
+ ref_tris_eq = np.all(self.reference._triangles == other.reference._triangles)
+ mov_coords_eq = np.all(self.moving._coords == other.moving._coords)
+ mov_tris_eq = np.all(self.moving._triangles == other.moving._triangles)
+ return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq
+
+ def __invert__(self):
+ return self.__class__(self._moving, self._reference)
+
+ @property
+ def reference(self):
+ return self._reference
+
+ @reference.setter
+ def reference(self, surface):
+ self._reference = SurfaceMesh(surface)
+
+ @property
+ def moving(self):
+ return self._moving
+
+ @moving.setter
+ def moving(self, surface):
+ self._moving = SurfaceMesh(surface)
+
+ @classmethod
+ def from_filename(cls, reference_path, moving_path):
+        """Create a Surface Index Transformation from a pair of surfaces with corresponding
+ vertices."""
+ reference = SurfaceMesh(nb.load(reference_path))
+ moving = SurfaceMesh(nb.load(moving_path))
+ return cls(reference, moving)
+
+
+class SurfaceCoordinateTransform(SurfaceTransformBase):
+ """Represents surface transformations in which the indices correspond and the coordinates
+    differ. This could be two surfaces representing different structures from the same
+ hemisphere, like white matter and pial, or it could be a sphere and a deformed sphere that
+ moves those coordinates to a different location."""
+
+ __slots__ = ("_reference", "_moving")
+
+ def __init__(self, reference, moving):
+ """Instantiate a transform between two surfaces with corresponding vertices.
+ Parameters
+ ----------
+ reference: surface
+ Surface with the starting coordinates for each index.
+ moving: surface
+ Surface with the destination coordinates for each index.
+ """
+
+ super().__init__(reference=SurfaceMesh(reference), moving=SurfaceMesh(moving))
+ if np.all(self._reference._triangles != self._moving._triangles):
+ raise ValueError("Both surfaces for an index transform must have corresponding"
+ " vertices.")
+
+ def map(self, x, inverse=False):
+ if not inverse:
+ source = self.reference
+ dest = self.moving
+ else:
+ source = self.moving
+ dest = self.reference
+
+ s_tree = KDTree(source._coords)
+ dists, matches = s_tree.query(x)
+ if not np.allclose(dists, 0):
+ raise NotImplementedError("Mapping on surfaces not implemented for coordinates that"
+ " aren't vertices")
+ return dest._coords[matches]
+
+ def __add__(self, other):
+ if isinstance(other, SurfaceCoordinateTransform):
+ return self.__class__(self.reference, other.moving)
+ raise NotImplementedError
+
+ def _to_hdf5(self, x5_root):
+ """Write transform to HDF5 file."""
+ triangles = x5_root.create_group("Triangles")
+ coords = x5_root.create_group("Coordinates")
+ coords.create_dataset("0", data=self.reference._coords)
+ coords.create_dataset("1", data=self.moving._coords)
+ triangles.create_dataset("0", data=self.reference._triangles)
+ xform = x5_root.create_group("Transform")
+ xform.attrs["Type"] = "SurfaceCoordinateTransform"
+ reference = xform.create_group("Reference")
+ reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0')
+ reference['Triangles'] = h5py.SoftLink('/0/Triangles/0')
+ moving = xform.create_group("Moving")
+ moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1')
+ moving['Triangles'] = h5py.SoftLink('/0/Triangles/0')
+
+ def to_filename(self, filename, fmt=None):
+ """Store the transform."""
+ if fmt is None:
+ fmt = "npz" if filename.endswith(".npz") else "X5"
+
+ if fmt == "npz":
+ raise NotImplementedError
+ # sparse.save_npz(filename, self.mat)
+ # return filename
+
+ with h5py.File(filename, "w") as out_file:
+ out_file.attrs["Format"] = "X5"
+ out_file.attrs["Version"] = np.uint16(1)
+ root = out_file.create_group("/0")
+ self._to_hdf5(root)
+
+ return filename
+
+ @classmethod
+ def from_filename(cls, filename=None, reference_path=None, moving_path=None,
+ fmt=None):
+ """Load transform from file."""
+ if filename is None:
+ if reference_path is None or moving_path is None:
+ raise ValueError("You must pass either a X5 file or a pair of reference and moving"
+ " surfaces.")
+ return cls(SurfaceMesh(nb.load(reference_path)),
+ SurfaceMesh(nb.load(moving_path)))
+
+ if fmt is None:
+ try:
+ fmt = "npz" if filename.endswith(".npz") else "X5"
+ except AttributeError:
+ fmt = "npz" if filename.as_posix().endswith(".npz") else "X5"
+
+ if fmt == "npz":
+ raise NotImplementedError
+ # return cls(sparse.load_npz(filename))
+
+ if fmt != "X5":
+ raise ValueError("Only npz and X5 formats are supported.")
+
+ with h5py.File(filename, "r") as f:
+ assert f.attrs["Format"] == "X5"
+ xform = f["/0/Transform"]
+ reference = SurfaceMesh.from_arrays(
+ xform['Reference']['Coordinates'],
+ xform['Reference']['Triangles']
+ )
+
+ moving = SurfaceMesh.from_arrays(
+ xform['Moving']['Coordinates'],
+ xform['Moving']['Triangles']
+ )
+ return cls(reference, moving)
+
+
+class SurfaceResampler(SurfaceTransformBase):
+ """
+ Represents transformations in which the coordinate space remains the same
+ and the indices change.
+ To achieve surface project-unproject functionality:
+ sphere_in as the reference
+ sphere_project_to as the moving
+ Then apply the transformation to sphere_unproject_from
+ """
+
+ __slots__ = ("_reference", "_moving", "mat", 'interpolation_method')
+
+ def __init__(self, reference, moving, interpolation_method='barycentric', mat=None):
+ """Initialize the resampling.
+
+ Parameters
+ ----------
+ reference: spherical surface of the reference space.
+        Output will have number of indices equal to the number of indices in this surface.
+ Both reference and moving should be in the same coordinate space.
+ moving: spherical surface that will be resampled.
+ Both reference and moving should be in the same coordinate space.
+ mat : array-like, shape (nv1, nv2)
+ Sparse matrix representing the transform.
+ interpolation_method : str
+ Only barycentric is currently implemented
+ """
+
+ super().__init__(SurfaceMesh(reference), SurfaceMesh(moving), spherical=True)
+
+ self.reference.set_radius()
+ self.moving.set_radius()
+ if interpolation_method not in ['barycentric']:
+ raise NotImplementedError(f"{interpolation_method} is not implemented.")
+ self.interpolation_method = interpolation_method
+
+ # TODO: should we deal with the case where reference and moving are the same?
+
+ # we're calculating the interpolation in the init so that we can ensure
+ # that it only has to be calculated once and will always be saved with the
+ # transform
+ if mat is None:
+ self.__calculate_mat()
+ m_tree = KDTree(self.moving._coords)
+ _, kmr_closest = m_tree.query(self.reference._coords, k=10)
+
+ # invert the triangles to generate a lookup table from vertices to triangle index
+ tri_lut = {}
+ for i, idxs in enumerate(self.moving._triangles):
+ for x in idxs:
+ if x not in tri_lut:
+ tri_lut[x] = [i]
+ else:
+ tri_lut[x].append(i)
+
+ # calculate the barycentric interpolation weights
+ bc_weights = []
+ enclosing = []
+ for point, kmrv in zip(self.reference._coords, kmr_closest):
+ close_tris = _find_close_tris(kmrv, tri_lut, self.moving)
+ ww, ee = _find_weights(point, close_tris, m_tree)
+ bc_weights.append(ww)
+ enclosing.append(ee)
+
+ # build sparse matrix
+ # commenting out code for barycentric nearest neighbor
+ # bary_nearest = []
+ mat = sparse.lil_array((self.reference._npoints, self.moving._npoints))
+ for s_ix, dd in enumerate(bc_weights):
+ for k, v in dd.items():
+ mat[s_ix, k] = v
+ # bary_nearest.append(np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()])
+ # bary_nearest = np.array(bary_nearest)
+ # transpose so that number of out vertices is columns
+ self.mat = sparse.csr_array(mat.T)
+ else:
+ if isinstance(mat, sparse.csr_array):
+ self.mat = mat
+ else:
+ self.mat = sparse.csr_array(mat)
+ # validate shape of the provided matrix
+ if (mat.shape[0] != moving._npoints) or (mat.shape[1] != reference._npoints):
+ msg = "Shape of provided mat does not match expectations based on " \
+ "dimensions of moving and reference. \n"
+ if mat.shape[0] != moving._npoints:
+ msg += f" mat has {mat.shape[0]} rows but moving has {moving._npoints} " \
+ f"vertices. \n"
+ if mat.shape[1] != reference._npoints:
+ msg += f" mat has {mat.shape[1]} columns but reference has" \
+ f" {reference._npoints} vertices."
+ raise ValueError(msg)
+
+ def __calculate_mat(self):
+ m_tree = KDTree(self.moving._coords)
+ _, kmr_closest = m_tree.query(self.reference._coords, k=10)
+
+ # invert the triangles to generate a lookup table from vertices to triangle index
+ tri_lut = {}
+ for i, idxs in enumerate(self.moving._triangles):
+ for x in idxs:
+ if x not in tri_lut:
+ tri_lut[x] = [i]
+ else:
+ tri_lut[x].append(i)
+
+ # calculate the barycentric interpolation weights
+ bc_weights = []
+ enclosing = []
+ for point, kmrv in zip(self.reference._coords, kmr_closest):
+ close_tris = _find_close_tris(kmrv, tri_lut, self.moving)
+ ww, ee = _find_weights(point, close_tris, m_tree)
+ bc_weights.append(ww)
+ enclosing.append(ee)
+
+ # build sparse matrix
+ # commenting out code for barycentric nearest neighbor
+ # bary_nearest = []
+ mat = sparse.lil_array((self.reference._npoints, self.moving._npoints))
+ for s_ix, dd in enumerate(bc_weights):
+ for k, v in dd.items():
+ mat[s_ix, k] = v
+ # bary_nearest.append(
+ # np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()]
+ # )
+ # bary_nearest = np.array(bary_nearest)
+ # transpose so that number of out vertices is columns
+ self.mat = sparse.csr_array(mat.T)
+
+ def map(self, x):
+ return x
+
+ def __add__(self, other):
+ if (isinstance(other, SurfaceResampler)
+ and (other.interpolation_method == self.interpolation_method)):
+ return self.__class__(
+ self.reference,
+ other.moving,
+ interpolation_method=self.interpolation_method
+ )
+ raise NotImplementedError
+
+ def __invert__(self):
+ return self.__class__(
+ self.moving,
+ self.reference,
+ interpolation_method=self.interpolation_method
+ )
+
+ @SurfaceTransformBase.reference.setter
+ def reference(self, surface):
+ raise ValueError("Don't modify the reference of an existing resampling."
+ "Create a new one instead.")
+
+ @SurfaceTransformBase.moving.setter
+ def moving(self, surface):
+ raise ValueError("Don't modify the moving of an existing resampling."
+ "Create a new one instead.")
+
+ def apply(self, x, inverse=False, normalize="element"):
+ """Apply the transform to surface data.
+
+ Parameters
+ ----------
+ x : array-like, shape (..., nv1), or SurfaceMesh
+ Data to transform or SurfaceMesh to resample
+ inverse : bool, default=False
+ Whether to apply the inverse transform. If True, ``x`` has shape
+ (..., nv2), and the output will have shape (..., nv1).
+ normalize : {"element", "sum", "none"}, default="element"
+ Normalization strategy. If "element", the scale of each value in
+ the output is comparable to each value of the input. If "sum", the
+ sum of the output is comparable to the sum of the input. If
+ "none", no normalization is applied.
+
+ Returns
+ -------
+ y : array-like, shape (..., nv2)
+ Transformed data.
+ """
+ if normalize not in ("element", "sum", "none"):
+ raise ValueError("Invalid normalization strategy.")
+
+ mat = self.mat.T if inverse else self.mat
+
+ if normalize == "element":
+ sum_ = mat.sum(axis=0)
+ scale = np.zeros_like(sum_)
+ mask = sum_ != 0
+ scale[mask] = 1.0 / sum_[mask]
+ mat = mat @ sparse.diags(scale)
+ elif normalize == "sum":
+ sum_ = mat.sum(axis=1)
+ scale = np.zeros_like(sum_)
+ mask = sum_ != 0
+ scale[mask] = 1.0 / sum_[mask]
+ mat = sparse.diags(scale) @ mat
+
+ if isinstance(x, (SurfaceMesh, pathlib.PurePath, str)):
+ x = SurfaceMesh(x)
+ if not x.check_sphere():
+ raise ValueError("If x is a surface, it should be a sphere.")
+ x.set_radius()
+ rs_coords = x._coords.T @ mat
+
+ y = SurfaceMesh.from_arrays(rs_coords.T, self.reference._triangles)
+ y.set_radius()
+ else:
+ y = x @ mat
+ return y
+
+ def _to_hdf5(self, x5_root):
+ """Write transform to HDF5 file."""
+ triangles = x5_root.create_group("Triangles")
+ coords = x5_root.create_group("Coordinates")
+ coords.create_dataset("0", data=self.reference._coords)
+ coords.create_dataset("1", data=self.moving._coords)
+ triangles.create_dataset("0", data=self.reference._triangles)
+ triangles.create_dataset("1", data=self.moving._triangles)
+ xform = x5_root.create_group("Transform")
+ xform.attrs["Type"] = "SurfaceResampling"
+ xform.attrs['InterpolationMethod'] = self.interpolation_method
+ mat = xform.create_group("IndexWeights")
+ mat.create_dataset("Data", data=self.mat.data)
+ mat.create_dataset("Indices", data=self.mat.indices)
+ mat.create_dataset("Indptr", data=self.mat.indptr)
+ mat.create_dataset("Shape", data=self.mat.shape)
+ reference = xform.create_group("Reference")
+ reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0')
+ reference['Triangles'] = h5py.SoftLink('/0/Triangles/0')
+ moving = xform.create_group("Moving")
+ moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1')
+ moving['Triangles'] = h5py.SoftLink('/0/Triangles/1')
+
+ def to_filename(self, filename, fmt=None):
+ """Store the transform."""
+ if fmt is None:
+ fmt = "npz" if filename.endswith(".npz") else "X5"
+
+ if fmt == "npz":
+ raise NotImplementedError
+ # sparse.save_npz(filename, self.mat)
+ # return filename
+
+ with h5py.File(filename, "w") as out_file:
+ out_file.attrs["Format"] = "X5"
+ out_file.attrs["Version"] = np.uint16(1)
+ root = out_file.create_group("/0")
+ self._to_hdf5(root)
+
+ return filename
+
+ @classmethod
+ def from_filename(cls, filename=None, reference_path=None, moving_path=None,
+ fmt=None, interpolation_method=None):
+ """Load transform from file."""
+ if filename is None:
+ if reference_path is None or moving_path is None:
+ raise ValueError("You must pass either a X5 file or a pair of reference and moving"
+ " surfaces.")
+ if interpolation_method is None:
+ interpolation_method = 'barycentric'
+ return cls(SurfaceMesh(nb.load(reference_path)),
+ SurfaceMesh(nb.load(moving_path)),
+ interpolation_method=interpolation_method)
+
+ if fmt is None:
+ try:
+ fmt = "npz" if filename.endswith(".npz") else "X5"
+ except AttributeError:
+ fmt = "npz" if filename.as_posix().endswith(".npz") else "X5"
+
+ if fmt == "npz":
+ raise NotImplementedError
+ # return cls(sparse.load_npz(filename))
+
+ if fmt != "X5":
+ raise ValueError("Only npz and X5 formats are supported.")
+
+ with h5py.File(filename, "r") as f:
+ assert f.attrs["Format"] == "X5"
+ xform = f["/0/Transform"]
+ try:
+ iws = xform['IndexWeights']
+ mat = sparse.csr_matrix(
+ (iws["Data"][()], iws["Indices"][()], iws["Indptr"][()]),
+ shape=iws["Shape"][()],
+ )
+ except KeyError:
+ mat = None
+ reference = SurfaceMesh.from_arrays(
+ xform['Reference']['Coordinates'],
+ xform['Reference']['Triangles']
+ )
+
+ moving = SurfaceMesh.from_arrays(
+ xform['Moving']['Coordinates'],
+ xform['Moving']['Triangles']
+ )
+ interpolation_method = xform.attrs['InterpolationMethod']
+ return cls(reference, moving, interpolation_method=interpolation_method, mat=mat)
+
+
+def _points_to_triangles(points, triangles):
+
+    """Vectorized projection of points onto a set of triangles.
+    from: https://stackoverflow.com/a/32529589
+    """
+ with np.errstate(all='ignore'):
+ # Unpack triangle points
+ p0, p1, p2 = np.asarray(triangles).swapaxes(0, 1)
+
+ # Calculate triangle edges
+ e0 = p1 - p0
+ e1 = p2 - p0
+ a = np.einsum('...i,...i', e0, e0)
+ b = np.einsum('...i,...i', e0, e1)
+ c = np.einsum('...i,...i', e1, e1)
+
+ # Calculate determinant and denominator
+ det = a * c - b * b
+ inv_det = 1. / det
+ denom = a - 2 * b + c
+
+ # Project to the edges
+ p = p0 - points[:, np.newaxis]
+ d = np.einsum('...i,...i', e0, p)
+ e = np.einsum('...i,...i', e1, p)
+ u = b * e - c * d
+ v = b * d - a * e
+
+ # Calculate numerators
+ bd = b + d
+ ce = c + e
+ numer0 = (ce - bd) / denom
+ numer1 = (c + e - b - d) / denom
+ da = -d / a
+ ec = -e / c
+
+ # Vectorize test conditions
+ m0 = u + v < det
+ m1 = u < 0
+ m2 = v < 0
+ m3 = d < 0
+ m4 = a + d > b + e
+
+ m5 = ce > bd
+
+ t0 = m0 & m1 & m2 & m3
+ t1 = m0 & m1 & m2 & ~m3
+ t2 = m0 & m1 & ~m2
+ t3 = m0 & ~m1 & m2
+ t4 = m0 & ~m1 & ~m2
+ t5 = ~m0 & m1 & m5
+ t6 = ~m0 & m1 & ~m5
+ t7 = ~m0 & m2 & m4
+ t8 = ~m0 & m2 & ~m4
+ t9 = ~m0 & ~m1 & ~m2
+
+ u = np.where(t0, np.clip(da, 0, 1), u)
+ v = np.where(t0, 0, v)
+ u = np.where(t1, 0, u)
+ v = np.where(t1, 0, v)
+ u = np.where(t2, 0, u)
+ v = np.where(t2, np.clip(ec, 0, 1), v)
+ u = np.where(t3, np.clip(da, 0, 1), u)
+ v = np.where(t3, 0, v)
+ u *= np.where(t4, inv_det, 1)
+ v *= np.where(t4, inv_det, 1)
+ u = np.where(t5, np.clip(numer0, 0, 1), u)
+ v = np.where(t5, 1 - u, v)
+ u = np.where(t6, 0, u)
+ v = np.where(t6, 1, v)
+ u = np.where(t7, np.clip(numer1, 0, 1), u)
+ v = np.where(t7, 1 - u, v)
+ u = np.where(t8, 1, u)
+ v = np.where(t8, 0, v)
+ u = np.where(t9, np.clip(numer1, 0, 1), u)
+ v = np.where(t9, 1 - u, v)
+
+ # Return closest points
+ return (p0.T + u[:, np.newaxis] * e0.T + v[:, np.newaxis] * e1.T).swapaxes(2, 1)
+
+
+def _barycentric_weights(vecs, coords):
+ """Compute the weights for barycentric interpolation.
+
+ Parameters
+ ----------
+ vecs : ndarray of shape (6, 3)
+ The 6 vectors used to compute barycentric weights.
+ a, e1, e2,
+ np.cross(e1, e2),
+ np.cross(e2, a),
+ np.cross(a, e1)
+ coords : ndarray of shape (3, )
+
+ Returns
+ -------
+ (w, u, v, t) : tuple of float
+ ``w``, ``u``, and ``v`` are the weights of the three vertices of the
+ triangle, respectively. ``t`` is the scale that needs to be multiplied
+ to ``coords`` to make it in the same plane as the three vertices.
+
+ From: https://github.com/neuroboros/neuroboros/blob/\
+f2a2efb914e783add2bf06e0f3715236d3d8550e/src/neuroboros/surface/_barycentric.pyx#L9-L47
+ """
+ det = coords[0] * vecs[3, 0] + coords[1] * vecs[3, 1] + coords[2] * vecs[3, 2]
+ if det == 0:
+ if vecs[3, 0] == 0 and vecs[3, 1] == 0 and vecs[3, 2] == 0:
+ warnings.warn("Zero cross product of two edges: "
+ "The three vertices are in the same line.")
+ else:
+ print(vecs[3])
+ y = coords - vecs[0]
+ u, v = np.linalg.lstsq(vecs[1:3].T, y, rcond=None)[0]
+ t = 1.
+ else:
+ uu = coords[0] * vecs[4, 0] + coords[1] * vecs[4, 1] + coords[2] * vecs[4, 2]
+ vv = coords[0] * vecs[5, 0] + coords[1] * vecs[5, 1] + coords[2] * vecs[5, 2]
+ u = uu / det
+ v = vv / det
+ tt = vecs[0, 0] * vecs[3, 0] + vecs[0, 1] * vecs[3, 1] + vecs[0, 2] * vecs[3, 2]
+ t = tt / det
+ w = 1. - (u + v)
+ return w, u, v, t
+
+
+def _find_close_tris(kdsv, tri_lut, surface):
+ tris = []
+ for kk in kdsv:
+ tris.extend(tri_lut[kk])
+ close_tri_verts = surface._triangles[np.unique(tris)]
+ close_tris = surface._coords[close_tri_verts]
+ return close_tris
+
+
+def _find_weights(point, close_tris, d_tree):
+ point = point[np.newaxis, :]
+ tri_dists = cdist(point, _points_to_triangles(point, close_tris).squeeze())
+
+ closest_tri = close_tris[(tri_dists == tri_dists.min()).squeeze()]
+ # make sure a single closest triangle was found
+ if closest_tri.shape[0] != 1:
+ # in the event of a tie (which can happen)
+ # just take the first triangle
+ closest_tri = closest_tri[0]
+
+ closest_tri = closest_tri.squeeze()
+ # Make sure point is actually inside triangle
+ enclosing = True
+ if np.all((point > closest_tri).sum(0) != 3):
+
+ enclosing = False
+ _, ct_idxs = d_tree.query(closest_tri)
+ a = closest_tri[0]
+ e1 = closest_tri[1] - a
+ e2 = closest_tri[2] - a
+ vecs = np.vstack([a, e1, e2, np.cross(e1, e2), np.cross(e2, a), np.cross(a, e1)])
+ res = {}
+ res[ct_idxs[0]], res[ct_idxs[1]], res[ct_idxs[2]], _ = _barycentric_weights(
+ vecs,
+ point.squeeze()
+ )
+ return res, enclosing
diff --git a/nitransforms/tests/data/affine-LAS.afni-array b/nitransforms/tests/data/affine-LAS.afni-array
new file mode 120000
index 00000000..27d48851
--- /dev/null
+++ b/nitransforms/tests/data/affine-LAS.afni-array
@@ -0,0 +1 @@
+affine-RAS.afni-array
\ No newline at end of file
diff --git a/nitransforms/tests/data/affine-LPS.afni-array b/nitransforms/tests/data/affine-LPS.afni-array
new file mode 120000
index 00000000..27d48851
--- /dev/null
+++ b/nitransforms/tests/data/affine-LPS.afni-array
@@ -0,0 +1 @@
+affine-RAS.afni-array
\ No newline at end of file
diff --git a/nitransforms/tests/data/affine-RAS.afni-array b/nitransforms/tests/data/affine-RAS.afni-array
new file mode 100644
index 00000000..df023e21
--- /dev/null
+++ b/nitransforms/tests/data/affine-RAS.afni-array
@@ -0,0 +1,3 @@
+# 3dvolreg matrices (DICOM-to-DICOM, row-by-row):
+0.999999 -0.000999999 -0.001 -4 0.00140494 0.621609 0.783327 -2 -0.000161717 -0.783327 0.62161 -1
+0.999999 -0.000999999 -0.001 -4 0.00140494 0.621609 0.783327 -2 -0.000161717 -0.783327 0.62161 -1
diff --git a/nitransforms/tests/data/affine-oblique.afni-array b/nitransforms/tests/data/affine-oblique.afni-array
new file mode 120000
index 00000000..27d48851
--- /dev/null
+++ b/nitransforms/tests/data/affine-oblique.afni-array
@@ -0,0 +1 @@
+affine-RAS.afni-array
\ No newline at end of file
diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py
index 8422ca10..49d7f7af 100644
--- a/nitransforms/tests/test_base.py
+++ b/nitransforms/tests/test_base.py
@@ -1,11 +1,22 @@
"""Tests of the base module."""
+
import numpy as np
import nibabel as nb
+from nibabel.arrayproxy import get_obj_dtype
+
import pytest
import h5py
-from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase
+
+from ..base import (
+ SpatialReference,
+ SampledSpatialData,
+ ImageGrid,
+ TransformBase,
+ SurfaceMesh,
+)
from .. import linear as nitl
+from ..resampling import apply
def test_SpatialReference(testdata_path):
@@ -42,10 +53,10 @@ def test_ImageGrid(get_testdata, image_orientation):
ijk = [[10, 10, 10], [40, 4, 20], [0, 0, 0], [s - 1 for s in im.shape[:3]]]
xyz = [img._affine.dot(idx + [1])[:-1] for idx in ijk]
- assert np.allclose(img.ras(ijk[0]), xyz[0])
+ assert np.allclose(np.squeeze(img.ras(ijk[0])), xyz[0])
assert np.allclose(np.round(img.index(xyz[0])), ijk[0])
- assert np.allclose(img.ras(ijk), xyz)
- assert np.allclose(np.round(img.index(xyz)), ijk)
+ assert np.allclose(img.ras(ijk).T, xyz)
+ assert np.allclose(np.round(img.index(xyz)).T, ijk)
# nd index / coords
idxs = img.ndindex
@@ -89,26 +100,29 @@ def _to_hdf5(klass, x5_root):
fname = testdata_path / "someones_anatomy.nii.gz"
img = nb.load(fname)
- imgdata = np.asanyarray(img.dataobj, dtype=img.get_data_dtype())
-
- # Test identity transform
- xfm = TransformBase()
- xfm.reference = fname
- assert xfm.ndim == 3
- moved = xfm.apply(fname, order=0)
- assert np.all(
- imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype())
- )
+ imgdata = np.asanyarray(img.dataobj, dtype=get_obj_dtype(img.dataobj))
# Test identity transform - setting reference
xfm = TransformBase()
+ with pytest.raises(TypeError):
+ _ = xfm.ndim
+
+ # Test to_filename
+ xfm.to_filename("data.x5")
+
+ # Test identity transform
+ xfm = nitl.Affine()
xfm.reference = fname
- assert xfm.ndim == 3
- moved = xfm.apply(str(fname), reference=fname, order=0)
+ moved = apply(xfm, fname, order=0)
+
assert np.all(
- imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype())
+ imgdata == np.asanyarray(moved.dataobj, dtype=get_obj_dtype(moved.dataobj))
)
+ # Test ndim returned by affine
+ assert nitl.Affine().ndim == 3
+ assert nitl.LinearTransformsMapping([nitl.Affine(), nitl.Affine()]).ndim == 4
+
# Test applying to Gifti
gii = nb.gifti.GiftiImage(
darrays=[
@@ -118,11 +132,11 @@ def _to_hdf5(klass, x5_root):
)
]
)
- giimoved = xfm.apply(fname, reference=gii, order=0)
+ giimoved = apply(xfm, fname, reference=gii, order=0)
assert np.allclose(giimoved.reshape(xfm.reference.shape), moved.get_fdata())
# Test to_filename
- xfm.to_filename("data.x5")
+ xfm.to_filename("data.xfm", fmt="itk")
def test_SampledSpatialData(testdata_path):
@@ -153,3 +167,49 @@ def test_concatenation(testdata_path):
x = [(0.0, 0.0, 0.0), (1.0, 1.0, 1.0), (-1.0, -1.0, -1.0)]
assert np.all((aff + nitl.Affine())(x) == x)
assert np.all((aff + nitl.Affine())(x, inverse=True) == x)
+
+
+def test_SurfaceMesh(testdata_path):
+ surf_path = testdata_path / "sub-200148_hemi-R_pial.surf.gii"
+ shape_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii"
+ )
+ img_path = testdata_path / "bold.nii.gz"
+
+ mesh = SurfaceMesh(nb.load(surf_path))
+ exp_coords_shape = (249277, 3)
+ exp_tris_shape = (498550, 3)
+ assert mesh._coords.shape == exp_coords_shape
+ assert mesh._triangles.shape == exp_tris_shape
+ assert mesh._npoints == exp_coords_shape[0]
+ assert mesh._ndim == exp_coords_shape[1]
+
+ mfd = SurfaceMesh(surf_path)
+ assert (mfd._coords == mesh._coords).all()
+ assert (mfd._triangles == mesh._triangles).all()
+
+ mfsm = SurfaceMesh(mfd)
+ assert (mfd._coords == mfsm._coords).all()
+ assert (mfd._triangles == mfsm._triangles).all()
+
+ with pytest.raises(ValueError):
+ SurfaceMesh(nb.load(img_path))
+
+ with pytest.raises(TypeError):
+ SurfaceMesh(nb.load(shape_path))
+
+
+def test_apply_deprecation(monkeypatch):
+ """Make sure a deprecation warning is issued."""
+ from nitransforms import resampling
+
+ def _retval(*args, **kwargs):
+ return 1
+
+ monkeypatch.setattr(resampling, "apply", _retval)
+
+ with pytest.deprecated_call():
+ retval = TransformBase().apply()
+
+ assert retval == 1
diff --git a/nitransforms/tests/test_cli.py b/nitransforms/tests/test_cli.py
index 7f16a1de..58867131 100644
--- a/nitransforms/tests/test_cli.py
+++ b/nitransforms/tests/test_cli.py
@@ -1,10 +1,18 @@
+import os
from textwrap import dedent
import pytest
from ..cli import cli_apply, main as ntcli
+if os.getenv("PYTEST_XDIST_WORKER"):
+ breaks_on_xdist = pytest.mark.skip(reason="xdist is active; rerun without to run this test.")
+else:
+ def breaks_on_xdist(test):
+ return test
+
+@breaks_on_xdist
def test_cli(capsys):
# empty command
with pytest.raises(SystemExit):
diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py
index cef7bfff..0cc79d15 100644
--- a/nitransforms/tests/test_io.py
+++ b/nitransforms/tests/test_io.py
@@ -28,6 +28,8 @@
)
from nitransforms.io.base import LinearParameters, TransformIOError, TransformFileError
from nitransforms.conftest import _datadir, _testdir
+from nitransforms.resampling import apply
+
LPS = np.diag([-1, -1, 1, 1])
ITK_MAT = LPS.dot(np.ones((4, 4)).dot(LPS))
@@ -180,7 +182,7 @@ def test_LT_conversions(data_path, fname):
"oblique",
],
)
-@pytest.mark.parametrize("sw", ["afni", "fsl", "fs", "itk"])
+@pytest.mark.parametrize("sw", ["afni", "fsl", "fs", "itk", "afni-array"])
def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata):
tmpdir.chdir()
@@ -190,6 +192,8 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata):
ext = ""
if sw == "afni":
factory = afni.AFNILinearTransform
+ elif sw == "afni-array":
+ factory = afni.AFNILinearTransformArray
elif sw == "fsl":
factory = fsl.FSLLinearTransform
elif sw == "itk":
@@ -204,7 +208,7 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata):
with pytest.raises(TransformFileError):
factory.from_string("")
- fname = "affine-%s.%s%s" % (image_orientation, sw, ext)
+ fname = f"affine-{image_orientation}.{sw}{ext}"
# Test the transform loaders are implemented
xfm = factory.from_filename(data_path / fname)
@@ -222,6 +226,9 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata):
# Test from_ras
RAS = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0])
+ if sw == "afni-array":
+ RAS = np.array([RAS, RAS])
+
xfm = factory.from_ras(RAS, reference=reference, moving=moving)
assert np.allclose(xfm.to_ras(reference=reference, moving=moving), RAS)
@@ -262,7 +269,7 @@ def test_LinearList_common(tmpdir, data_path, sw, image_orientation, get_testdat
tflist1 = factory(mats)
- fname = "affine-%s.%s%s" % (image_orientation, sw, ext)
+ fname = f"affine-{image_orientation}.{sw}{ext}"
with pytest.raises(FileNotFoundError):
factory.from_filename(fname)
@@ -305,7 +312,7 @@ def test_ITKLinearTransform(tmpdir, testdata_path):
# Test to_filename(textfiles)
itkxfm.to_filename("textfile.tfm")
- with open("textfile.tfm", "r") as f:
+ with open("textfile.tfm") as f:
itkxfm2 = itk.ITKLinearTransform.from_fileobj(f)
assert np.allclose(itkxfm["parameters"], itkxfm2["parameters"])
@@ -492,10 +499,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y,
assert np.allclose(card_aff, nb.load("deob_3drefit.nii.gz").affine)
# Check that nitransforms can emulate 3drefit -deoblique
- nt3drefit = Affine(
- afni._cardinal_rotation(img.affine, False),
- reference="deob_3drefit.nii.gz",
- ).apply("orig.nii.gz")
+ nt3drefit = apply(
+ Affine(
+ afni._cardinal_rotation(img.affine, False),
+ reference="deob_3drefit.nii.gz",
+ ),
+ "orig.nii.gz",
+ )
diff = (
np.asanyarray(img.dataobj, dtype="uint8")
@@ -504,10 +514,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y,
assert np.sqrt((diff[10:-10, 10:-10, 10:-10] ** 2).mean()) < 0.1
# Check that nitransforms can revert 3drefit -deoblique
- nt_undo3drefit = Affine(
- afni._cardinal_rotation(img.affine, True),
- reference="orig.nii.gz",
- ).apply("deob_3drefit.nii.gz")
+ nt_undo3drefit = apply(
+ Affine(
+ afni._cardinal_rotation(img.affine, True),
+ reference="orig.nii.gz",
+ ),
+ "deob_3drefit.nii.gz",
+ )
diff = (
np.asanyarray(img.dataobj, dtype="uint8")
@@ -526,16 +539,21 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y,
assert np.allclose(deobaff, deobnii.affine)
# Check resampling in deobliqued grid
- ntdeobnii = Affine(np.eye(4), reference=deobnii.__class__(
- np.zeros(deobshape, dtype="uint8"),
- deobaff,
- deobnii.header
- )).apply(img, order=0)
+ ntdeobnii = apply(
+ Affine(np.eye(4), reference=deobnii.__class__(
+ np.zeros(deobshape, dtype="uint8"),
+ deobaff,
+ deobnii.header
+ )),
+ img,
+ order=0,
+ )
# Generate an internal box to exclude border effects
box = np.zeros(img.shape, dtype="uint8")
box[10:-10, 10:-10, 10:-10] = 1
- ntdeobmask = Affine(np.eye(4), reference=ntdeobnii).apply(
+ ntdeobmask = apply(
+ Affine(np.eye(4), reference=ntdeobnii),
nb.Nifti1Image(box, img.affine, img.header),
order=0,
)
diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py
index eea77b7f..969b33ab 100644
--- a/nitransforms/tests/test_linear.py
+++ b/nitransforms/tests/test_linear.py
@@ -1,41 +1,26 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests of linear transforms."""
-import os
+
import pytest
import numpy as np
-from subprocess import check_call
-import shutil
import h5py
-import nibabel as nb
from nibabel.eulerangles import euler2mat
from nibabel.affines import from_matvec
from nitransforms import linear as nitl
from nitransforms import io
from .utils import assert_affines_by_filename
-RMSE_TOL = 0.1
-APPLY_LINEAR_CMD = {
- "fsl": """\
-flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \
--applyxfm -init {transform} -out {resampled}\
-""".format,
- "itk": """\
-antsApplyTransforms -d 3 -r {reference} -i {moving} \
--o {resampled} -n NearestNeighbor -t {transform} --float\
-""".format,
- "afni": """\
-3dAllineate -base {reference} -input {moving} \
--prefix {resampled} -1Dmatrix_apply {transform} -final NN\
-""".format,
- "fs": """\
-mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \
---o {resampled} --nearest""".format,
-}
-
-
-@pytest.mark.parametrize("matrix", [[0.0], np.ones((3, 3, 3)), np.ones((3, 4)), ])
+
+@pytest.mark.parametrize(
+ "matrix",
+ [
+ [0.0],
+ np.ones((3, 3, 3)),
+ np.ones((3, 4)),
+ ],
+)
def test_linear_typeerrors1(matrix):
"""Exercise errors in Affine creation."""
with pytest.raises(TypeError):
@@ -157,7 +142,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt):
assert np.allclose(
xfm.matrix,
- nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix,
+ nitl.load(
+ fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file
+ ).matrix,
)
else:
assert xfm == nitl.load(fname, fmt=supplied_fmt, reference=ref_file)
@@ -167,7 +154,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt):
if fmt == "fsl":
assert np.allclose(
xfm.matrix,
- nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix,
+ nitl.load(
+ fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file
+ ).matrix,
rtol=1e-2, # FSL incurs into large errors due to rounding
)
else:
@@ -181,7 +170,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt):
if fmt == "fsl":
assert np.allclose(
xfm.matrix,
- nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix,
+ nitl.load(
+ fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file
+ ).matrix,
rtol=1e-2, # FSL incurs into large errors due to rounding
)
else:
@@ -191,7 +182,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt):
if fmt == "fsl":
assert np.allclose(
xfm.matrix,
- nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix,
+ nitl.load(
+ fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file
+ ).matrix,
rtol=1e-2, # FSL incurs into large errors due to rounding
)
else:
@@ -211,12 +204,15 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool
T = np.linalg.inv(T)
xfm = (
- nitl.Affine(T) if (sw_tool, image_orientation) != ("afni", "oblique") else
+ nitl.Affine(T)
+ if (sw_tool, image_orientation) != ("afni", "oblique")
# AFNI is special when moving or reference are oblique - let io do the magic
- nitl.Affine(io.afni.AFNILinearTransform.from_ras(T).to_ras(
- reference=img,
- moving=img,
- ))
+ else nitl.Affine(
+ io.afni.AFNILinearTransform.from_ras(T).to_ras(
+ reference=img,
+ moving=img,
+ )
+ )
)
xfm.reference = img
@@ -226,103 +222,13 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool
elif sw_tool == "fs":
ext = ".lta"
- xfm_fname1 = "M.%s%s" % (sw_tool, ext)
+ xfm_fname1 = f"M.{sw_tool}{ext}"
xfm.to_filename(xfm_fname1, fmt=sw_tool)
xfm_fname2 = str(data_path / "affine-%s.%s%s") % (image_orientation, sw_tool, ext)
assert_affines_by_filename(xfm_fname1, xfm_fname2)
-@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", 'oblique', ])
-@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"])
-def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orientation, sw_tool):
- """Check implementation of exporting affines to formats."""
- tmpdir.chdir()
-
- img = get_testdata[image_orientation]
- msk = get_testmask[image_orientation]
-
- # Generate test transform
- T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0])
- xfm = nitl.Affine(T)
- xfm.reference = img
-
- ext = ""
- if sw_tool == "itk":
- ext = ".tfm"
- elif sw_tool == "fs":
- ext = ".lta"
-
- img.to_filename("img.nii.gz")
- msk.to_filename("mask.nii.gz")
-
- # Write out transform file (software-dependent)
- xfm_fname = "M.%s%s" % (sw_tool, ext)
- # Change reference dataset for AFNI & oblique
- if (sw_tool, image_orientation) == ("afni", "oblique"):
- io.afni.AFNILinearTransform.from_ras(
- T,
- moving=img,
- reference=img,
- ).to_filename(xfm_fname)
- else:
- xfm.to_filename(xfm_fname, fmt=sw_tool)
-
- cmd = APPLY_LINEAR_CMD[sw_tool](
- transform=os.path.abspath(xfm_fname),
- reference=os.path.abspath("mask.nii.gz"),
- moving=os.path.abspath("mask.nii.gz"),
- resampled=os.path.abspath("resampled_brainmask.nii.gz"),
- )
-
- # skip test if command is not available on host
- exe = cmd.split(" ", 1)[0]
- if not shutil.which(exe):
- pytest.skip("Command {} not found on host".format(exe))
-
- # resample mask
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved_mask = nb.load("resampled_brainmask.nii.gz")
-
- nt_moved_mask = xfm.apply(msk, order=0)
- nt_moved_mask.set_data_dtype(msk.get_data_dtype())
- nt_moved_mask.to_filename("ntmask.nii.gz")
- diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj)
-
- assert np.sqrt((diff ** 2).mean()) < RMSE_TOL
- brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool)
-
- cmd = APPLY_LINEAR_CMD[sw_tool](
- transform=os.path.abspath(xfm_fname),
- reference=os.path.abspath("img.nii.gz"),
- moving=os.path.abspath("img.nii.gz"),
- resampled=os.path.abspath("resampled.nii.gz"),
- )
-
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved = nb.load("resampled.nii.gz")
- sw_moved.set_data_dtype(img.get_data_dtype())
-
- nt_moved = xfm.apply(img, order=0)
- diff = (
- np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype())
- - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
- )
-
- # A certain tolerance is necessary because of resampling at borders
- assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL
-
- nt_moved = xfm.apply("img.nii.gz", order=0)
- diff = (
- np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype())
- - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
- )
- # A certain tolerance is necessary because of resampling at borders
- assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL
-
-
def test_Affine_to_x5(tmpdir, testdata_path):
"""Test affine's operations."""
tmpdir.chdir()
@@ -335,36 +241,6 @@ def test_Affine_to_x5(tmpdir, testdata_path):
aff._to_hdf5(f.create_group("Affine"))
-def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path):
- """Apply transform mappings."""
- hmc = nitl.load(
- data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz"
- )
- assert isinstance(hmc, nitl.LinearTransformsMapping)
-
- # Test-case: realign functional data on to sbref
- nii = hmc.apply(
- testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz"
- )
- assert nii.dataobj.shape[-1] == len(hmc)
-
- # Test-case: write out a fieldmap moved with head
- hmcinv = nitl.LinearTransformsMapping(
- np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz"
- )
- nii = hmcinv.apply(testdata_path / "fmap.nii.gz", order=1)
- assert nii.dataobj.shape[-1] == len(hmc)
-
- # Ensure a ValueError is issued when trying to do weird stuff
- hmc = nitl.LinearTransformsMapping(hmc.matrix[:1, ...])
- with pytest.raises(ValueError):
- hmc.apply(
- testdata_path / "func.nii.gz",
- order=1,
- reference=testdata_path / "sbref.nii.gz",
- )
-
-
def test_mulmat_operator(testdata_path):
"""Check the @ operator."""
ref = testdata_path / "someones_anatomy.nii.gz"
diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py
index 6dee540e..b5dd5c62 100644
--- a/nitransforms/tests/test_manip.py
+++ b/nitransforms/tests/test_manip.py
@@ -1,66 +1,16 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests of nonlinear transforms."""
-import os
-import shutil
-from subprocess import check_call
+
import pytest
import numpy as np
-import nibabel as nb
-from ..manip import load as _load, TransformChain
+from ..manip import TransformChain
from ..linear import Affine
-from .test_nonlinear import (
- RMSE_TOL,
- APPLY_NONLINEAR_CMD,
-)
FMT = {"lta": "fs", "tfm": "itk"}
-def test_itk_h5(tmp_path, testdata_path):
- """Check a translation-only field on one or more axes, different image orientations."""
- os.chdir(str(tmp_path))
- img_fname = testdata_path / "T1w_scanner.nii.gz"
- xfm_fname = (
- testdata_path
- / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5"
- )
-
- xfm = _load(xfm_fname)
-
- assert len(xfm) == 2
-
- ref_fname = tmp_path / "reference.nii.gz"
- nb.Nifti1Image(
- np.zeros(xfm.reference.shape, dtype="uint16"), xfm.reference.affine,
- ).to_filename(str(ref_fname))
-
- # Then apply the transform and cross-check with software
- cmd = APPLY_NONLINEAR_CMD["itk"](
- transform=xfm_fname,
- reference=ref_fname,
- moving=img_fname,
- output="resampled.nii.gz",
- extra="",
- )
-
- # skip test if command is not available on host
- exe = cmd.split(" ", 1)[0]
- if not shutil.which(exe):
- pytest.skip(f"Command {exe} not found on host")
-
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved = nb.load("resampled.nii.gz")
-
- nt_moved = xfm.apply(img_fname, order=0)
- nt_moved.to_filename("nt_resampled.nii.gz")
- diff = sw_moved.get_fdata() - nt_moved.get_fdata()
- # A certain tolerance is necessary because of resampling at borders
- assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL
-
-
@pytest.mark.parametrize("ext0", ["lta", "tfm"])
@pytest.mark.parametrize("ext1", ["lta", "tfm"])
@pytest.mark.parametrize("ext2", ["lta", "tfm"])
diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py
index 9ae82d29..6112f633 100644
--- a/nitransforms/tests/test_nonlinear.py
+++ b/nitransforms/tests/test_nonlinear.py
@@ -1,44 +1,29 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests of nonlinear transforms."""
+
import os
-import shutil
-from subprocess import check_call
import pytest
import numpy as np
import nibabel as nb
+from nitransforms.resampling import apply
from nitransforms.base import TransformError
from nitransforms.io.base import TransformFileError
from nitransforms.nonlinear import (
BSplineFieldTransform,
DenseFieldTransform,
- load as nlload,
)
from ..io.itk import ITKDisplacementsField
-RMSE_TOL = 0.05
-APPLY_NONLINEAR_CMD = {
- "itk": """\
-antsApplyTransforms -d 3 -r {reference} -i {moving} \
--o {output} -n NearestNeighbor -t {transform} {extra}\
-""".format,
- "afni": """\
-3dNwarpApply -nwarp {transform} -source {moving} \
--master {reference} -interp NN -prefix {output} {extra}\
-""".format,
- 'fsl': """\
-applywarp -i {moving} -r {reference} -o {output} {extra}\
--w {transform} --interp=nn""".format,
-}
-
-
@pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 3)])
def test_itk_disp_load(size):
"""Checks field sizes."""
with pytest.raises(TransformFileError):
- ITKDisplacementsField.from_image(nb.Nifti1Image(np.zeros(size), np.eye(4), None))
+ ITKDisplacementsField.from_image(
+ nb.Nifti1Image(np.zeros(size), np.eye(4), None)
+ )
@pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 2, 3), (20, 20, 20, 1, 4)])
@@ -96,144 +81,18 @@ def test_bsplines_references(testdata_path):
).to_field()
with pytest.raises(TransformError):
- BSplineFieldTransform(
- testdata_path / "someones_bspline_coefficients.nii.gz"
- ).apply(testdata_path / "someones_anatomy.nii.gz")
+ apply(
+ BSplineFieldTransform(
+ testdata_path / "someones_bspline_coefficients.nii.gz"
+ ),
+ testdata_path / "someones_anatomy.nii.gz",
+ )
- BSplineFieldTransform(
- testdata_path / "someones_bspline_coefficients.nii.gz"
- ).apply(
+ apply(
+ BSplineFieldTransform(testdata_path / "someones_bspline_coefficients.nii.gz"),
testdata_path / "someones_anatomy.nii.gz",
- reference=testdata_path / "someones_anatomy.nii.gz"
- )
-
-
-@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"])
-@pytest.mark.parametrize("sw_tool", ["itk", "afni"])
-@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)])
-def test_displacements_field1(
- tmp_path,
- get_testdata,
- get_testmask,
- image_orientation,
- sw_tool,
- axis,
-):
- """Check a translation-only field on one or more axes, different image orientations."""
- if (image_orientation, sw_tool) == ("oblique", "afni"):
- pytest.skip("AFNI obliques are not yet implemented for displacements fields")
-
- os.chdir(str(tmp_path))
- nii = get_testdata[image_orientation]
- msk = get_testmask[image_orientation]
- nii.to_filename("reference.nii.gz")
- msk.to_filename("mask.nii.gz")
-
- fieldmap = np.zeros(
- (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3),
- dtype="float32",
- )
- fieldmap[..., axis] = -10.0
-
- _hdr = nii.header.copy()
- if sw_tool in ("itk",):
- _hdr.set_intent("vector")
- _hdr.set_data_dtype("float32")
-
- xfm_fname = "warp.nii.gz"
- field = nb.Nifti1Image(fieldmap, nii.affine, _hdr)
- field.to_filename(xfm_fname)
-
- xfm = nlload(xfm_fname, fmt=sw_tool)
-
- # Then apply the transform and cross-check with software
- cmd = APPLY_NONLINEAR_CMD[sw_tool](
- transform=os.path.abspath(xfm_fname),
- reference=tmp_path / "mask.nii.gz",
- moving=tmp_path / "mask.nii.gz",
- output=tmp_path / "resampled_brainmask.nii.gz",
- extra="--output-data-type uchar" if sw_tool == "itk" else "",
- )
-
- # skip test if command is not available on host
- exe = cmd.split(" ", 1)[0]
- if not shutil.which(exe):
- pytest.skip("Command {} not found on host".format(exe))
-
- # resample mask
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved_mask = nb.load("resampled_brainmask.nii.gz")
- nt_moved_mask = xfm.apply(msk, order=0)
- nt_moved_mask.set_data_dtype(msk.get_data_dtype())
- diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj)
-
- assert np.sqrt((diff ** 2).mean()) < RMSE_TOL
- brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool)
-
- # Then apply the transform and cross-check with software
- cmd = APPLY_NONLINEAR_CMD[sw_tool](
- transform=os.path.abspath(xfm_fname),
- reference=tmp_path / "reference.nii.gz",
- moving=tmp_path / "reference.nii.gz",
- output=tmp_path / "resampled.nii.gz",
- extra="--output-data-type uchar" if sw_tool == "itk" else ""
- )
-
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved = nb.load("resampled.nii.gz")
-
- nt_moved = xfm.apply(nii, order=0)
- nt_moved.set_data_dtype(nii.get_data_dtype())
- nt_moved.to_filename("nt_resampled.nii.gz")
- sw_moved.set_data_dtype(nt_moved.get_data_dtype())
- diff = (
- np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype())
- - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
+ reference=testdata_path / "someones_anatomy.nii.gz",
)
- # A certain tolerance is necessary because of resampling at borders
- assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL
-
-
-@pytest.mark.parametrize("sw_tool", ["itk", "afni"])
-def test_displacements_field2(tmp_path, testdata_path, sw_tool):
- """Check a translation-only field on one or more axes, different image orientations."""
- os.chdir(str(tmp_path))
- img_fname = testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz"
- xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format(
- sw_tool
- )
-
- xfm = nlload(xfm_fname, fmt=sw_tool)
-
- # Then apply the transform and cross-check with software
- cmd = APPLY_NONLINEAR_CMD[sw_tool](
- transform=xfm_fname,
- reference=img_fname,
- moving=img_fname,
- output="resampled.nii.gz",
- extra="",
- )
-
- # skip test if command is not available on host
- exe = cmd.split(" ", 1)[0]
- if not shutil.which(exe):
- pytest.skip("Command {} not found on host".format(exe))
-
- exit_code = check_call([cmd], shell=True)
- assert exit_code == 0
- sw_moved = nb.load("resampled.nii.gz")
-
- nt_moved = xfm.apply(img_fname, order=0)
- nt_moved.to_filename("nt_resampled.nii.gz")
- sw_moved.set_data_dtype(nt_moved.get_data_dtype())
- diff = (
- np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype())
- - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
- )
- # A certain tolerance is necessary because of resampling at borders
- assert np.sqrt((diff ** 2).mean()) < RMSE_TOL
def test_bspline(tmp_path, testdata_path):
@@ -247,12 +106,16 @@ def test_bspline(tmp_path, testdata_path):
bsplxfm = BSplineFieldTransform(bs_name, reference=img_name)
dispxfm = DenseFieldTransform(disp_name)
- out_disp = dispxfm.apply(img_name)
- out_bspl = bsplxfm.apply(img_name)
+ out_disp = apply(dispxfm, img_name)
+ out_bspl = apply(bsplxfm, img_name)
out_disp.to_filename("resampled_field.nii.gz")
out_bspl.to_filename("resampled_bsplines.nii.gz")
- assert np.sqrt(
- (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32")) ** 2
- ).mean() < 0.2
+ assert (
+ np.sqrt(
+ (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32"))
+ ** 2
+ ).mean()
+ < 0.2
+ )
diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py
new file mode 100644
index 00000000..2384ad97
--- /dev/null
+++ b/nitransforms/tests/test_resampling.py
@@ -0,0 +1,365 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+"""Exercise the standalone ``apply()`` implementation."""
+
+import os
+import pytest
+import numpy as np
+from subprocess import check_call
+import shutil
+
+import nibabel as nb
+from nibabel.eulerangles import euler2mat
+from nibabel.affines import from_matvec
+from nitransforms import linear as nitl
+from nitransforms import nonlinear as nitnl
+from nitransforms import manip as nitm
+from nitransforms import io
+from nitransforms.resampling import apply
+
+RMSE_TOL_LINEAR = 0.09
+RMSE_TOL_NONLINEAR = 0.05
+APPLY_LINEAR_CMD = {
+ "fsl": """\
+flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \
+-applyxfm -init {transform} -out {resampled}\
+""".format,
+ "itk": """\
+antsApplyTransforms -d 3 -r {reference} -i {moving} \
+-o {resampled} -n NearestNeighbor -t {transform} --float\
+""".format,
+ "afni": """\
+3dAllineate -base {reference} -input {moving} \
+-prefix {resampled} -1Dmatrix_apply {transform} -final NN\
+""".format,
+ "fs": """\
+mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \
+--o {resampled} --nearest""".format,
+}
+APPLY_NONLINEAR_CMD = {
+ "itk": """\
+antsApplyTransforms -d 3 -r {reference} -i {moving} \
+-o {output} -n NearestNeighbor -t {transform} {extra}\
+""".format,
+ "afni": """\
+3dNwarpApply -nwarp {transform} -source {moving} \
+-master {reference} -interp NN -prefix {output} {extra}\
+""".format,
+ "fsl": """\
+applywarp -i {moving} -r {reference} -o {output} {extra}\
+-w {transform} --interp=nn""".format,
+}
+
+
+@pytest.mark.parametrize(
+ "image_orientation",
+ [
+ "RAS",
+ "LAS",
+ "LPS",
+ "oblique",
+ ],
+)
+@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"])
+def test_apply_linear_transform(
+ tmpdir, get_testdata, get_testmask, image_orientation, sw_tool
+):
+    """Check application of linear transforms cross-checked against reference software."""
+ tmpdir.chdir()
+
+ img = get_testdata[image_orientation]
+ msk = get_testmask[image_orientation]
+
+ # Generate test transform
+ T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0])
+ xfm = nitl.Affine(T)
+ xfm.reference = img
+
+ ext = ""
+ if sw_tool == "itk":
+ ext = ".tfm"
+ elif sw_tool == "fs":
+ ext = ".lta"
+
+ img.to_filename("img.nii.gz")
+ msk.to_filename("mask.nii.gz")
+
+ # Write out transform file (software-dependent)
+ xfm_fname = f"M.{sw_tool}{ext}"
+ # Change reference dataset for AFNI & oblique
+ if (sw_tool, image_orientation) == ("afni", "oblique"):
+ io.afni.AFNILinearTransform.from_ras(
+ T,
+ moving=img,
+ reference=img,
+ ).to_filename(xfm_fname)
+ else:
+ xfm.to_filename(xfm_fname, fmt=sw_tool)
+
+ cmd = APPLY_LINEAR_CMD[sw_tool](
+ transform=os.path.abspath(xfm_fname),
+ reference=os.path.abspath("mask.nii.gz"),
+ moving=os.path.abspath("mask.nii.gz"),
+ resampled=os.path.abspath("resampled_brainmask.nii.gz"),
+ )
+
+ # skip test if command is not available on host
+ exe = cmd.split(" ", 1)[0]
+ if not shutil.which(exe):
+ pytest.skip(f"Command {exe} not found on host")
+
+ # resample mask
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved_mask = nb.load("resampled_brainmask.nii.gz")
+
+ nt_moved_mask = apply(xfm, msk, order=0)
+ nt_moved_mask.set_data_dtype(msk.get_data_dtype())
+ nt_moved_mask.to_filename("ntmask.nii.gz")
+ diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj)
+
+ assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR
+ brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool)
+
+ cmd = APPLY_LINEAR_CMD[sw_tool](
+ transform=os.path.abspath(xfm_fname),
+ reference=os.path.abspath("img.nii.gz"),
+ moving=os.path.abspath("img.nii.gz"),
+ resampled=os.path.abspath("resampled.nii.gz"),
+ )
+
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved = nb.load("resampled.nii.gz")
+ sw_moved.set_data_dtype(img.get_data_dtype())
+
+ nt_moved = apply(xfm, img, order=0)
+ diff = np.asanyarray(
+ sw_moved.dataobj, dtype=sw_moved.get_data_dtype()
+ ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
+
+ # A certain tolerance is necessary because of resampling at borders
+ assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR
+
+ nt_moved = apply(xfm, "img.nii.gz", order=0)
+ diff = np.asanyarray(
+ sw_moved.dataobj, dtype=sw_moved.get_data_dtype()
+ ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
+ # A certain tolerance is necessary because of resampling at borders
+ assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR
+
+
+@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"])
+@pytest.mark.parametrize("sw_tool", ["itk", "afni"])
+@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)])
+def test_displacements_field1(
+ tmp_path,
+ get_testdata,
+ get_testmask,
+ image_orientation,
+ sw_tool,
+ axis,
+):
+ """Check a translation-only field on one or more axes, different image orientations."""
+ if (image_orientation, sw_tool) == ("oblique", "afni"):
+ pytest.skip("AFNI obliques are not yet implemented for displacements fields")
+
+ os.chdir(str(tmp_path))
+ nii = get_testdata[image_orientation]
+ msk = get_testmask[image_orientation]
+ nii.to_filename("reference.nii.gz")
+ msk.to_filename("mask.nii.gz")
+
+ fieldmap = np.zeros(
+ (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3),
+ dtype="float32",
+ )
+ fieldmap[..., axis] = -10.0
+
+ _hdr = nii.header.copy()
+ if sw_tool in ("itk",):
+ _hdr.set_intent("vector")
+ _hdr.set_data_dtype("float32")
+
+ xfm_fname = "warp.nii.gz"
+ field = nb.Nifti1Image(fieldmap, nii.affine, _hdr)
+ field.to_filename(xfm_fname)
+
+ xfm = nitnl.load(xfm_fname, fmt=sw_tool)
+
+ # Then apply the transform and cross-check with software
+ cmd = APPLY_NONLINEAR_CMD[sw_tool](
+ transform=os.path.abspath(xfm_fname),
+ reference=tmp_path / "mask.nii.gz",
+ moving=tmp_path / "mask.nii.gz",
+ output=tmp_path / "resampled_brainmask.nii.gz",
+ extra="--output-data-type uchar" if sw_tool == "itk" else "",
+ )
+
+ # skip test if command is not available on host
+ exe = cmd.split(" ", 1)[0]
+ if not shutil.which(exe):
+ pytest.skip(f"Command {exe} not found on host")
+
+ # resample mask
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved_mask = nb.load("resampled_brainmask.nii.gz")
+ nt_moved_mask = apply(xfm, msk, order=0)
+ nt_moved_mask.set_data_dtype(msk.get_data_dtype())
+ diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj)
+
+ assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR
+ brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool)
+
+ # Then apply the transform and cross-check with software
+ cmd = APPLY_NONLINEAR_CMD[sw_tool](
+ transform=os.path.abspath(xfm_fname),
+ reference=tmp_path / "reference.nii.gz",
+ moving=tmp_path / "reference.nii.gz",
+ output=tmp_path / "resampled.nii.gz",
+ extra="--output-data-type uchar" if sw_tool == "itk" else "",
+ )
+
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved = nb.load("resampled.nii.gz")
+
+ nt_moved = apply(xfm, nii, order=0)
+ nt_moved.set_data_dtype(nii.get_data_dtype())
+ nt_moved.to_filename("nt_resampled.nii.gz")
+ sw_moved.set_data_dtype(nt_moved.get_data_dtype())
+ diff = np.asanyarray(
+ sw_moved.dataobj, dtype=sw_moved.get_data_dtype()
+ ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
+ # A certain tolerance is necessary because of resampling at borders
+ assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR
+
+
+@pytest.mark.parametrize("sw_tool", ["itk", "afni"])
+def test_displacements_field2(tmp_path, testdata_path, sw_tool):
+    """Check application of real (non-synthetic) displacements fields from ITK/AFNI warp files."""
+ os.chdir(str(tmp_path))
+ img_fname = testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz"
+ xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format(
+ sw_tool
+ )
+
+ xfm = nitnl.load(xfm_fname, fmt=sw_tool)
+
+ # Then apply the transform and cross-check with software
+ cmd = APPLY_NONLINEAR_CMD[sw_tool](
+ transform=xfm_fname,
+ reference=img_fname,
+ moving=img_fname,
+ output="resampled.nii.gz",
+ extra="",
+ )
+
+ # skip test if command is not available on host
+ exe = cmd.split(" ", 1)[0]
+ if not shutil.which(exe):
+ pytest.skip(f"Command {exe} not found on host")
+
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved = nb.load("resampled.nii.gz")
+
+ nt_moved = apply(xfm, img_fname, order=0)
+ nt_moved.to_filename("nt_resampled.nii.gz")
+ sw_moved.set_data_dtype(nt_moved.get_data_dtype())
+ diff = np.asanyarray(
+ sw_moved.dataobj, dtype=sw_moved.get_data_dtype()
+ ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype())
+ # A certain tolerance is necessary because of resampling at borders
+ assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR
+
+
+def test_apply_transformchain(tmp_path, testdata_path):
+    """Check application of a composed transform chain (ITK .h5) cross-checked against ANTs."""
+ os.chdir(str(tmp_path))
+ img_fname = testdata_path / "T1w_scanner.nii.gz"
+ xfm_fname = (
+ testdata_path
+ / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5"
+ )
+
+ xfm = nitm.load(xfm_fname)
+
+ assert len(xfm) == 2
+
+ ref_fname = tmp_path / "reference.nii.gz"
+ nb.Nifti1Image(
+ np.zeros(xfm.reference.shape, dtype="uint16"),
+ xfm.reference.affine,
+ ).to_filename(str(ref_fname))
+
+ # Then apply the transform and cross-check with software
+ cmd = APPLY_NONLINEAR_CMD["itk"](
+ transform=xfm_fname,
+ reference=ref_fname,
+ moving=img_fname,
+ output="resampled.nii.gz",
+ extra="",
+ )
+
+ # skip test if command is not available on host
+ exe = cmd.split(" ", 1)[0]
+ if not shutil.which(exe):
+ pytest.skip(f"Command {exe} not found on host")
+
+ exit_code = check_call([cmd], shell=True)
+ assert exit_code == 0
+ sw_moved = nb.load("resampled.nii.gz")
+
+ nt_moved = apply(xfm, img_fname, order=0)
+ nt_moved.to_filename("nt_resampled.nii.gz")
+ diff = sw_moved.get_fdata() - nt_moved.get_fdata()
+ # A certain tolerance is necessary because of resampling at borders
+ assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL_LINEAR
+
+
+@pytest.mark.parametrize("serialize_4d", [True, False])
+def test_LinearTransformsMapping_apply(
+ tmp_path, data_path, testdata_path, serialize_4d
+):
+ """Apply transform mappings."""
+ hmc = nitl.load(
+ data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz"
+ )
+ assert isinstance(hmc, nitl.LinearTransformsMapping)
+
+ # Test-case: realign functional data on to sbref
+ nii = apply(
+ hmc,
+ testdata_path / "func.nii.gz",
+ order=1,
+ reference=testdata_path / "sbref.nii.gz",
+ serialize_nvols=2 if serialize_4d else np.inf,
+ )
+ assert nii.dataobj.shape[-1] == len(hmc)
+
+ # Test-case: write out a fieldmap moved with head
+ hmcinv = nitl.LinearTransformsMapping(
+ np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz"
+ )
+
+ nii = apply(
+ hmcinv,
+ testdata_path / "fmap.nii.gz",
+ order=1,
+ serialize_nvols=2 if serialize_4d else np.inf,
+ )
+ assert nii.dataobj.shape[-1] == len(hmc)
+
+ # Ensure a ValueError is issued when trying to apply mismatched transforms
+ # (e.g., in this case, two transforms while the functional has 8 volumes)
+ hmc = nitl.LinearTransformsMapping(hmc.matrix[:2, ...])
+ with pytest.raises(ValueError):
+ apply(
+ hmc,
+ testdata_path / "func.nii.gz",
+ order=1,
+ reference=testdata_path / "sbref.nii.gz",
+ serialize_nvols=2 if serialize_4d else np.inf,
+ )
diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py
new file mode 100644
index 00000000..a210583e
--- /dev/null
+++ b/nitransforms/tests/test_surface.py
@@ -0,0 +1,241 @@
+import tempfile
+
+import numpy as np
+import nibabel as nb
+import pytest
+from scipy import sparse
+from nitransforms.base import SurfaceMesh
+from nitransforms.surface import (
+ SurfaceTransformBase,
+ SurfaceCoordinateTransform,
+ SurfaceResampler
+)
+
+# def test_surface_transform_npz():
+# mat = sparse.random(10, 10, density=0.5)
+# xfm = SurfaceCoordinateTransform(mat)
+# fn = tempfile.mktemp(suffix=".npz")
+# print(fn)
+# xfm.to_filename(fn)
+#
+# xfm2 = SurfaceCoordinateTransform.from_filename(fn)
+# try:
+# assert xfm.mat.shape == xfm2.mat.shape
+# np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data)
+# np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices)
+# np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr)
+# except Exception:
+# os.remove(fn)
+# raise
+# os.remove(fn)
+
+
+# def test_surface_transform_normalization():
+# mat = np.random.uniform(size=(20, 10))
+# xfm = SurfaceCoordinateTransform(mat)
+# x = np.random.uniform(size=(5, 20))
+# y_element = xfm.apply(x, normalize="element")
+# np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1))
+# y_sum = xfm.apply(x, normalize="sum")
+# np.testing.assert_allclose(y_sum.sum(axis=1), x.sum(axis=1))
+# y_none = xfm.apply(x, normalize="none")
+# assert y_none.sum() != y_element.sum()
+# assert y_none.sum() != y_sum.sum()
+
+def test_SurfaceTransformBase(testdata_path):
+ # note these transformations are a bit of a weird use of surface transformation, but I'm
+ # just testing the base class and the io
+ sphere_reg_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii"
+ )
+ pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii"
+
+ sphere_reg = SurfaceMesh(nb.load(sphere_reg_path))
+ pial = SurfaceMesh(nb.load(pial_path))
+ stfb = SurfaceTransformBase(sphere_reg, pial)
+
+ # test loading from filenames
+ stfb_ff = SurfaceTransformBase.from_filename(sphere_reg_path, pial_path)
+ assert stfb_ff == stfb
+
+ # test inversion and setting
+ stfb_i = ~stfb
+ stfb.reference = pial
+ stfb.moving = sphere_reg
+ assert np.all(stfb_i._reference._coords == stfb._reference._coords)
+ assert np.all(stfb_i._reference._triangles == stfb._reference._triangles)
+ assert np.all(stfb_i._moving._coords == stfb._moving._coords)
+ assert np.all(stfb_i._moving._triangles == stfb._moving._triangles)
+ # test equality
+ assert stfb_i == stfb
+
+
+def test_SurfaceCoordinateTransform(testdata_path):
+ # note these transformations are a bit of a weird use of surface transformation, but I'm
+ # just testing the class and the io
+ sphere_reg_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii"
+ )
+ pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii"
+ fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii"
+
+ sphere_reg = SurfaceMesh(nb.load(sphere_reg_path))
+ pial = SurfaceMesh(nb.load(pial_path))
+ fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path))
+
+ # test mesh correspondence test
+ with pytest.raises(ValueError):
+ sct = SurfaceCoordinateTransform(fslr_sphere, pial)
+
+ # test loading from filenames
+ sct = SurfaceCoordinateTransform(pial, sphere_reg)
+ sctf = SurfaceCoordinateTransform.from_filename(reference_path=pial_path,
+ moving_path=sphere_reg_path)
+ assert sct == sctf
+
+ # test mapping
+ assert np.all(sct.map(sct.moving._coords[:100], inverse=True) == sct.reference._coords[:100])
+ assert np.all(sct.map(sct.reference._coords[:100]) == sct.moving._coords[:100])
+ with pytest.raises(NotImplementedError):
+ sct.map(sct.moving._coords[0])
+
+ # test inversion and addition
+ scti = ~sct
+
+ assert sct + scti == SurfaceCoordinateTransform(pial, pial)
+ assert scti + sct == SurfaceCoordinateTransform(sphere_reg, sphere_reg)
+
+ sct.reference = sphere_reg
+ sct.moving = pial
+ assert np.all(scti.reference._coords == sct.reference._coords)
+ assert np.all(scti.reference._triangles == sct.reference._triangles)
+ assert scti == sct
+
+
+def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir):
+ sphere_reg_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii"
+ )
+ pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii"
+
+ sct = SurfaceCoordinateTransform(pial_path, sphere_reg_path)
+ fn = tempfile.mktemp(suffix=".h5")
+ sct.to_filename(fn)
+ sct2 = SurfaceCoordinateTransform.from_filename(fn)
+ assert sct == sct2
+
+
+def test_ProjectUnproject(testdata_path):
+
+ sphere_reg_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii"
+ )
+ fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii"
+ subj_fsaverage_sphere_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii"
+ )
+ fslr_fsaverage_sphere_path = (
+ testdata_path
+ / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii"
+ )
+ pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii"
+
+    # test project-unproject functionality
+ projunproj = SurfaceResampler(sphere_reg_path, fslr_sphere_path)
+ with pytest.raises(ValueError):
+ projunproj.apply(pial_path)
+ transformed = projunproj.apply(fslr_fsaverage_sphere_path)
+ projunproj_ref = nb.load(subj_fsaverage_sphere_path)
+ assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005
+ assert np.all(transformed._triangles == projunproj_ref.agg_data()[1])
+
+
+def test_SurfaceResampler(testdata_path, tmpdir):
+ dif_tol = 0.001
+ fslr_sphere_path = (
+ testdata_path
+ / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii"
+ )
+ shape_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii"
+ )
+ ref_resampled_thickness_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii"
+ )
+ pial_path = (
+ testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii"
+ )
+ sphere_reg_path = (
+ testdata_path
+ / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii"
+ )
+
+ fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path))
+ sphere_reg = SurfaceMesh(nb.load(sphere_reg_path))
+ subj_thickness = nb.load(shape_path)
+
+ with pytest.raises(ValueError):
+ SurfaceResampler(sphere_reg_path, pial_path)
+ with pytest.raises(ValueError):
+ SurfaceResampler(pial_path, sphere_reg_path)
+
+ reference = fslr_sphere
+ moving = sphere_reg
+ # compare results to what connectome workbench produces
+ resampling = SurfaceResampler(reference, moving)
+ resampled_thickness = resampling.apply(subj_thickness.agg_data(), normalize='element')
+ ref_resampled = nb.load(ref_resampled_thickness_path).agg_data()
+
+ max_dif = np.abs(resampled_thickness.astype(np.float32) - ref_resampled).max()
+ assert max_dif < dif_tol
+
+ with pytest.raises(ValueError):
+ SurfaceResampler(reference, moving, mat=resampling.mat[:, :10000])
+ with pytest.raises(ValueError):
+ SurfaceResampler(reference, moving, mat=resampling.mat[:10000, :])
+ with pytest.raises(ValueError):
+ resampling.reference = reference
+ with pytest.raises(ValueError):
+ resampling.moving = moving
+ with pytest.raises(NotImplementedError):
+ _ = SurfaceResampler(reference, moving, "foo")
+
+ # test file io
+ fn = tempfile.mktemp(suffix=".h5")
+ resampling.to_filename(fn)
+ resampling2 = SurfaceResampler.from_filename(fn)
+
+ # assert resampling2 == resampling
+ assert np.allclose(resampling2.reference._coords, resampling.reference._coords)
+ assert np.all(resampling2.reference._triangles == resampling.reference._triangles)
+    assert np.allclose(resampling2.moving._coords, resampling.moving._coords)
+ assert np.all(resampling2.moving._triangles == resampling.moving._triangles)
+
+ resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element')
+ assert np.all(resampled_thickness2 == resampled_thickness)
+
+ # test loading with a csr
+ assert isinstance(resampling.mat, sparse.csr_array)
+ resampling2a = SurfaceResampler(reference, moving, mat=resampling.mat)
+ resampled_thickness2a = resampling2a.apply(subj_thickness.agg_data(), normalize='element')
+ assert np.all(resampled_thickness2a == resampled_thickness)
+
+ with pytest.raises(ValueError):
+ _ = SurfaceResampler(moving, reference, mat=resampling.mat)
+
+ # test map
+ assert np.all(resampling.map(np.array([[0, 0, 0]])) == np.array([[0, 0, 0]]))
+
+ # test loading from surfaces
+ resampling3 = SurfaceResampler.from_filename(reference_path=fslr_sphere_path,
+ moving_path=sphere_reg_path)
+ assert resampling3 == resampling
+ resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element')
+ assert np.all(resampled_thickness3 == resampled_thickness)
diff --git a/nitransforms/tests/test_version.py b/nitransforms/tests/test_version.py
index a0723e9a..bc4c4a0a 100644
--- a/nitransforms/tests/test_version.py
+++ b/nitransforms/tests/test_version.py
@@ -1,10 +1,15 @@
"""Test _version.py."""
import sys
from collections import namedtuple
-from pkg_resources import DistributionNotFound
from importlib import reload
+import pytest
import nitransforms
+try:
+ from pkg_resources import DistributionNotFound
+except ImportError:
+ pytest.skip(allow_module_level=True)
+
def test_version_scm0(monkeypatch):
"""Retrieve the version via setuptools_scm."""
diff --git a/pyproject.toml b/pyproject.toml
index 686a8c8d..45a691bb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,6 @@
[build-system]
-requires = ["setuptools >= 42.0", "wheel", "setuptools_scm[toml] >= 3.4", "setuptools_scm_git_archive"]
+requires = ["setuptools >= 45", "setuptools_scm[toml]>=6.2"]
+build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
write_to = "nitransforms/_version.py"
diff --git a/setup.cfg b/setup.cfg
index 51fdd474..158a9013 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,9 +6,10 @@ classifiers =
Intended Audience :: Science/Research
Topic :: Scientific/Engineering :: Image Recognition
License :: OSI Approved :: BSD License
- Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
description = NiTransforms -- Neuroimaging spatial transforms in Python.
license = MIT License
long_description = file:README.md
@@ -20,17 +21,17 @@ project_urls =
url = https://github.com/nipy/nitransforms
[options]
-python_requires = >= 3.7
+python_requires = >= 3.8
install_requires =
- numpy ~= 1.21.0; python_version<'3.8'
- numpy ~= 1.21; python_version>'3.7'
- scipy ~= 1.6.0; python_version<'3.8'
- scipy ~= 1.6; python_version>'3.7'
+ numpy ~= 1.21
+ scipy >= 1.6.0
nibabel >= 3.0
h5py
+ pathlib
test_requires =
pytest
pytest-cov
+ pytest-env
nose
codecov
setup_requires =
@@ -47,7 +48,9 @@ niftiexts =
test =
pytest
pytest-cov
+ pytest-env
codecov
+ lxml
tests =
%(test)s