From 6bb3d6936fd417db742699b874ce86fca2fff7a3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 2 May 2022 15:11:35 -0400 Subject: [PATCH 001/123] MAINT: Add readthedocs file --- .readthedocs.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..8d664f7d --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,16 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.8" + +sphinx: + configuration: docs/conf.py + +python: + install: + - requirements: docs/requirements.txt From 23f94c3534e46b7eca13862e43b0ac4e5975d7d4 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 2 May 2022 15:12:52 -0400 Subject: [PATCH 002/123] MAINT: Simplify dependencies --- setup.cfg | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 51fdd474..251a5bef 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,10 +22,8 @@ url = https://github.com/nipy/nitransforms [options] python_requires = >= 3.7 install_requires = - numpy ~= 1.21.0; python_version<'3.8' - numpy ~= 1.21; python_version>'3.7' - scipy ~= 1.6.0; python_version<'3.8' - scipy ~= 1.6; python_version>'3.7' + numpy >= 1.21.0 + scipy >= 1.6.0 nibabel >= 3.0 h5py test_requires = From 5694f080e793a09208d715a00191464ee8ead678 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 2 May 2022 15:29:04 -0400 Subject: [PATCH 003/123] MAINT: Install package for RTD --- .readthedocs.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 8d664f7d..34413ca6 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,4 +13,7 @@ sphinx: python: install: - - requirements: docs/requirements.txt + - path: . 
+ extra_requirements: + - all + - requirements: docs/requirements.txt From 49d52d300aad84226ccf37da4b000730e04c7953 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 15:45:12 -0400 Subject: [PATCH 004/123] MNT: Drop Python 3.7 support, test through 3.11 --- .github/workflows/pythonpackage.yml | 2 +- .github/workflows/travis.yml | 2 +- setup.cfg | 5 +++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 8fd9829b..d780e79b 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - name: Set up Python ${{ matrix.python-version }} diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml index 19c6a2a1..c41ca31d 100644 --- a/.github/workflows/travis.yml +++ b/.github/workflows/travis.yml @@ -11,7 +11,7 @@ jobs: strategy: max-parallel: 5 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - name: Git settings (pacify DataLad) diff --git a/setup.cfg b/setup.cfg index 251a5bef..93499c35 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,9 +6,10 @@ classifiers = Intended Audience :: Science/Research Topic :: Scientific/Engineering :: Image Recognition License :: OSI Approved :: BSD License - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 description = NiTransforms -- Neuroimaging spatial transforms in Python. license = MIT License long_description = file:README.md @@ -20,7 +21,7 @@ project_urls = url = https://github.com/nipy/nitransforms [options] -python_requires = >= 3.7 +python_requires = >= 3.8 install_requires = numpy >= 1.21.0 scipy >= 1.6.0 From bc7f688a89f78ad1d4f3b8c6a47d78922f59d687 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 15:48:50 -0400 Subject: [PATCH 005/123] CI: Upgrade actions --- .github/workflows/pythonpackage.yml | 8 ++++---- .github/workflows/travis.yml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index d780e79b..5c519a77 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,10 +20,10 @@ jobs: steps: - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Build in confined, updated environment and interpolate version @@ -89,9 +89,9 @@ jobs: if: "!contains(github.event.head_commit.message, '[skip ci]')" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.7 - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: 3.7 - run: pip install flake8 diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml index c41ca31d..32538f8c 100644 --- a/.github/workflows/travis.yml +++ b/.github/workflows/travis.yml @@ -19,10 +19,10 @@ jobs: git config --global user.name 'NiPreps Bot' git config --global user.email 'nipreps@gmail.com' - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: 
actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: conda with: path: | @@ -36,7 +36,7 @@ jobs: $CONDA/bin/conda install -c conda-forge git-annex datalad pip codecov pytest $CONDA/bin/python -m pip install datalad-osf - - uses: actions/cache@v2 + - uses: actions/cache@v3 with: path: ${{ env.TEST_DATA_HOME }} key: data-cache-v2 @@ -53,7 +53,7 @@ jobs: $CONDA/bin/datalad update --merge -d nitransforms-tests/ $CONDA/bin/datalad get -d nitransforms-tests/ - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install minimal dependencies run: | $CONDA/bin/pip install .[tests] From 1776a76414a2dbca8a861e09d419dc7878ac874b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 15:52:34 -0400 Subject: [PATCH 006/123] CI: Use codecov action --- .github/workflows/travis.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/travis.yml b/.github/workflows/travis.yml index 32538f8c..7efe2c74 100644 --- a/.github/workflows/travis.yml +++ b/.github/workflows/travis.yml @@ -33,7 +33,7 @@ jobs: python-${{ matrix.python-version }}- - name: Install DataLad run: | - $CONDA/bin/conda install -c conda-forge git-annex datalad pip codecov pytest + $CONDA/bin/conda install -c conda-forge git-annex datalad pip pytest $CONDA/bin/python -m pip install datalad-osf - uses: actions/cache@v3 @@ -62,5 +62,6 @@ jobs: $CONDA/bin/pytest -v --cov nitransforms --cov-config .coveragerc --cov-report xml:cov.xml --doctest-modules nitransforms/ - name: Submit code coverage - run: | - $CONDA/bin/python -m codecov --flags travis --file cov.xml -e $GITHUB_RUN_NUMBER + uses: codecov/codecov-action@v3 + with: + files: cov.xml From 321c0748b6603011fa6bb6bb0cebb9be9ffce5b4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 15:54:00 -0400 Subject: [PATCH 007/123] CI: Drop direct setup.py calls, use pipx where convenient --- .github/workflows/pythonpackage.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 5c519a77..97c4984e 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -26,19 +26,20 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Build in confined, updated environment and interpolate version + - name: Build package run: | - python -m venv /tmp/buildenv - source /tmp/buildenv/bin/activate - python -m pip install -U setuptools pip wheel twine docutils - python setup.py sdist bdist_wheel - python -m twine check dist/nitransforms* + pipx run build + - name: Determine expected version + run: | + python -m venv /tmp/getversion + source /tmp/getversion/bin/activate + python -m pip install setuptools_scm # Interpolate version if [[ "$GITHUB_REF" == refs/tags/* ]]; then TAG=${GITHUB_REF##*/} fi - THISVERSION=$( python setup.py --version ) + THISVERSION=$( python -m setuptools_scm ) THISVERSION=${TAG:-$THISVERSION} echo "Expected VERSION: \"${THISVERSION}\"" echo "THISVERSION=${THISVERSION}" >> ${GITHUB_ENV} @@ -47,7 +48,7 @@ jobs: run: | python -m venv /tmp/install_sdist source /tmp/install_sdist/bin/activate - python -m pip install --upgrade pip wheel + python -m pip install --upgrade pip python -m pip install dist/nitransforms*.tar.gz INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")') echo "VERSION: \"${THISVERSION}\"" @@ -58,7 +59,7 @@ jobs: 
run: | python -m venv /tmp/install_wheel source /tmp/install_wheel/bin/activate - python -m pip install --upgrade pip wheel + python -m pip install --upgrade pip python -m pip install dist/nitransforms*.whl INSTALLED_VERSION=$(python -c 'import nitransforms; print(nitransforms.__version__, end="")') echo "INSTALLED: \"${INSTALLED_VERSION}\"" @@ -94,5 +95,4 @@ jobs: uses: actions/setup-python@v4 with: python-version: 3.7 - - run: pip install flake8 - - run: flake8 nitransforms/ + - run: pipx run flake8 nitransforms From b1ca471ac6c848b3ecbca35792efa61d7835ccd3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 15:54:26 -0400 Subject: [PATCH 008/123] MNT: Update setuptools_scm dependencies to include git archive support --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 686a8c8d..45a691bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [build-system] -requires = ["setuptools >= 42.0", "wheel", "setuptools_scm[toml] >= 3.4", "setuptools_scm_git_archive"] +requires = ["setuptools >= 45", "setuptools_scm[toml]>=6.2"] +build-backend = "setuptools.build_meta" [tool.setuptools_scm] write_to = "nitransforms/_version.py" From 300ae9dce242c02e7e536bb5079f560c8aedb427 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Jul 2023 16:26:36 -0400 Subject: [PATCH 009/123] STY: pyupgrade --py38-plus --- nitransforms/cli.py | 2 +- nitransforms/io/lta.py | 4 ++-- nitransforms/nonlinear.py | 2 +- nitransforms/tests/test_io.py | 6 +++--- nitransforms/tests/test_linear.py | 6 +++--- nitransforms/tests/test_nonlinear.py | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/nitransforms/cli.py b/nitransforms/cli.py index 59c6b9d3..63b8bed4 100644 --- a/nitransforms/cli.py +++ b/nitransforms/cli.py @@ -45,7 +45,7 @@ def cli_apply(pargs): cval=pargs.cval, prefilter=pargs.prefilter, ) - moved.to_filename(pargs.out or "nt_{}".format(os.path.basename(pargs.moving))) + moved.to_filename(pargs.out or f"nt_{os.path.basename(pargs.moving)}") def get_parser(): diff --git a/nitransforms/io/lta.py b/nitransforms/io/lta.py index 31271154..334266bb 100644 --- a/nitransforms/io/lta.py +++ b/nitransforms/io/lta.py @@ -176,7 +176,7 @@ def set_type(self, new_type): return raise NotImplementedError( - "Converting {0} to {1} is not yet available".format( + "Converting {} to {} is not yet available".format( transform_codes.label[current], transform_codes.label[new_type] ) ) @@ -334,7 +334,7 @@ def to_string(self): code = int(self["type"]) header = [ "# LTA-array file created by NiTransforms", - "type = {} # {}".format(code, transform_codes.label[code]), + f"type = {code} # {transform_codes.label[code]}", "nxforms = {}".format(self["nxforms"]), ] xforms = [xfm.to_string(partial=True) for xfm in self._xforms] diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index c0cdc92e..8cfb4a62 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -241,7 +241,7 @@ class BSplineFieldTransform(TransformBase): def __init__(self, coefficients, reference=None, order=3): """Create a smooth deformation field using B-Spline basis.""" - super(BSplineFieldTransform, self).__init__() + super().__init__() self._order = order coefficients = _ensure_image(coefficients) diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index cef7bfff..2475c946 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -204,7 +204,7 @@ def 
test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): with pytest.raises(TransformFileError): factory.from_string("") - fname = "affine-%s.%s%s" % (image_orientation, sw, ext) + fname = f"affine-{image_orientation}.{sw}{ext}" # Test the transform loaders are implemented xfm = factory.from_filename(data_path / fname) @@ -262,7 +262,7 @@ def test_LinearList_common(tmpdir, data_path, sw, image_orientation, get_testdat tflist1 = factory(mats) - fname = "affine-%s.%s%s" % (image_orientation, sw, ext) + fname = f"affine-{image_orientation}.{sw}{ext}" with pytest.raises(FileNotFoundError): factory.from_filename(fname) @@ -305,7 +305,7 @@ def test_ITKLinearTransform(tmpdir, testdata_path): # Test to_filename(textfiles) itkxfm.to_filename("textfile.tfm") - with open("textfile.tfm", "r") as f: + with open("textfile.tfm") as f: itkxfm2 = itk.ITKLinearTransform.from_fileobj(f) assert np.allclose(itkxfm["parameters"], itkxfm2["parameters"]) diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index eea77b7f..2957f59c 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -226,7 +226,7 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool elif sw_tool == "fs": ext = ".lta" - xfm_fname1 = "M.%s%s" % (sw_tool, ext) + xfm_fname1 = f"M.{sw_tool}{ext}" xfm.to_filename(xfm_fname1, fmt=sw_tool) xfm_fname2 = str(data_path / "affine-%s.%s%s") % (image_orientation, sw_tool, ext) @@ -257,7 +257,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient msk.to_filename("mask.nii.gz") # Write out transform file (software-dependent) - xfm_fname = "M.%s%s" % (sw_tool, ext) + xfm_fname = f"M.{sw_tool}{ext}" # Change reference dataset for AFNI & oblique if (sw_tool, image_orientation) == ("afni", "oblique"): io.afni.AFNILinearTransform.from_ras( @@ -278,7 +278,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient # skip test if command is not available on host exe = cmd.split(" ", 1)[0] if not shutil.which(exe): - pytest.skip("Command {} not found on host".format(exe)) + pytest.skip(f"Command {exe} not found on host") # resample mask exit_code = check_call([cmd], shell=True) diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 9ae82d29..93d3fd4c 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -158,7 +158,7 @@ def test_displacements_field1( # skip test if command is not available on host exe = cmd.split(" ", 1)[0] if not shutil.which(exe): - pytest.skip("Command {} not found on host".format(exe)) + pytest.skip(f"Command {exe} not found on host") # resample mask exit_code = check_call([cmd], shell=True) @@ -219,7 +219,7 @@ def test_displacements_field2(tmp_path, testdata_path, sw_tool): # skip test if command is not available on host exe = cmd.split(" ", 1)[0] if not shutil.which(exe): - pytest.skip("Command {} not found on host".format(exe)) + pytest.skip(f"Command {exe} not found on host") exit_code = check_call([cmd], shell=True) assert exit_code == 0 From c41fc0768ae9fb07db2da309dfd24d86e920111e Mon Sep 17 00:00:00 2001 From: bpinsard Date: Wed, 9 Aug 2023 14:49:58 -0400 Subject: [PATCH 010/123] output displacement fields --- nitransforms/io/afni.py | 11 +++++++++++ nitransforms/io/base.py | 11 +++++++++++ nitransforms/io/fsl.py | 11 +++++++++++ nitransforms/io/itk.py | 12 ++++++++++++ 4 files changed, 45 insertions(+) diff --git a/nitransforms/io/afni.py 
b/nitransforms/io/afni.py index b7fc657b..e95c4494 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -193,6 +193,17 @@ def from_image(cls, imgobj): return imgobj.__class__(field, imgobj.affine, hdr) + @classmethod + def to_image(cls, imgobj): + """Export a displacements field from a nibabel object.""" + + hdr = imgobj.header.copy() + + warp_data = imgobj.get_fdata().reshape(imgobj.shape[:3] + (1, imgobj.shape[-1])) + warp_data[..., (0, 1)] *= -1 + + return imgobj.__class__(warp_data, imgobj.affine, hdr) + def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG): """ diff --git a/nitransforms/io/base.py b/nitransforms/io/base.py index 6d1a7c8e..d86c8539 100644 --- a/nitransforms/io/base.py +++ b/nitransforms/io/base.py @@ -146,6 +146,17 @@ def from_image(cls, imgobj): """Import a displacements field from a nibabel image object.""" raise NotImplementedError + @classmethod + def to_filename(cls, img, filename): + """Export a displacements field to a NIfTI file.""" + imgobj = cls.to_image(img) + imgobj.to_filename(filename) + + @classmethod + def to_image(cls, imgobj): + """Export a displacements field image from a nitransforms image object.""" + raise NotImplementedError + def _ensure_image(img): if isinstance(img, (str, Path)): diff --git a/nitransforms/io/fsl.py b/nitransforms/io/fsl.py index 8e4c8264..f454227e 100644 --- a/nitransforms/io/fsl.py +++ b/nitransforms/io/fsl.py @@ -190,6 +190,17 @@ def from_image(cls, imgobj): return imgobj.__class__(field, imgobj.affine, hdr) + @classmethod + def to_image(cls, imgobj): + """Export a displacements field from a nibabel object.""" + + hdr = imgobj.header.copy() + + warp_data = imgobj.get_fdata() + warp_data[..., 0] *= -1 + + return imgobj.__class__(warp_data, imgobj.affine, hdr) + def _fsl_aff_adapt(space): """ diff --git a/nitransforms/io/itk.py b/nitransforms/io/itk.py index d7a093eb..ddeb78e6 100644 --- a/nitransforms/io/itk.py +++ b/nitransforms/io/itk.py @@ -352,6 +352,18 @@ def from_image(cls, imgobj): return imgobj.__class__(field, imgobj.affine, hdr) + @classmethod + def to_image(cls, imgobj): + """Export a displacements field from a nibabel object.""" + + hdr = imgobj.header.copy() + hdr.set_intent("vector") + + warp_data = imgobj.get_fdata().reshape(imgobj.shape[:3] + (1, imgobj.shape[-1])) + warp_data[..., (0, 1)] *= -1 + + return imgobj.__class__(warp_data, imgobj.affine, hdr) + class ITKCompositeH5: """A data structure for ITK's HDF5 files.""" From 755e8d1c18b948a6efee262419feae77054ea3ad Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 17:11:10 +0100 Subject: [PATCH 011/123] fix: inefficient iterative reloading of reference and moving images The list comprehension had ``nb.loads`` and other method calls continously happening when converting the affine array into RAS. 
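For illustration, the loop-invariant hoisting this fix applies can be
sketched as follows (hypothetical helper names only; the actual change to
``AFNILinearTransformArray.to_ras`` is in the hunks below):

    import numpy as np

    def stack_slow(matrices, load_rotation):
        # load_rotation() re-reads the image and recomputes the rotation
        # once per transform in the array
        return np.stack([load_rotation() @ mat for mat in matrices])

    def stack_fast(matrices, load_rotation):
        rotation = load_rotation()  # hoisted: evaluated a single time
        return np.stack([rotation @ mat for mat in matrices])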
--- nitransforms/io/afni.py | 39 +++++++++++++++++++++++++++++---------- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index b7fc657b..75476e77 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -108,17 +108,24 @@ def from_string(cls, string): sa["parameters"] = parameters return tf - def to_ras(self, moving=None, reference=None): + def to_ras(self, moving=None, reference=None, pre_rotation=None, post_rotation=None): """Return a nitransforms internal RAS+ matrix.""" # swapaxes is necessary, as axis 0 encodes series of transforms retval = LPS @ np.swapaxes(self.structarr["parameters"].T, 0, 1) @ LPS - reference = _ensure_image(reference) - if reference is not None and _is_oblique(reference.affine): - retval = retval @ _cardinal_rotation(reference.affine, True) - moving = _ensure_image(moving) - if moving is not None and _is_oblique(moving.affine): - retval = _cardinal_rotation(moving.affine, False) @ retval + if pre_rotation is None and reference is not None: + ref_aff = _ensure_image(reference).affine + pre_rotation = _cardinal_rotation(ref_aff, True) if _is_oblique(ref_aff) else None + + if pre_rotation is not None: + retval = retval @ pre_rotation + + if post_rotation is None and reference is not None: + mov_aff = _ensure_image(moving).affine + post_rotation = _cardinal_rotation(mov_aff, True) if _is_oblique(mov_aff) else None + + if post_rotation is not None: + retval = post_rotation @ retval return retval @@ -130,9 +137,21 @@ class AFNILinearTransformArray(BaseLinearTransformList): def to_ras(self, moving=None, reference=None): """Return a nitransforms' internal RAS matrix.""" - return np.stack( - [xfm.to_ras(moving=moving, reference=reference) for xfm in self.xforms] - ) + + pre_rotation = None + if reference is not None: + ref_aff = _ensure_image(reference).affine + pre_rotation = _cardinal_rotation(ref_aff, True) if _is_oblique(ref_aff) else None + + post_rotation = None + if moving is not None: + mov_aff = _ensure_image(moving).affine + post_rotation = _cardinal_rotation(mov_aff, True) if _is_oblique(mov_aff) else None + + return np.stack([ + xfm.to_ras(pre_rotation=pre_rotation, post_rotation=post_rotation) + for xfm in self.xforms + ]) def to_string(self): """Convert to a string directly writeable to file.""" From 6b0f3a8883f773cd01cbe7460759f952d6dac93f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 17:29:49 +0100 Subject: [PATCH 012/123] Update nitransforms/io/afni.py Co-authored-by: Chris Markiewicz --- nitransforms/io/afni.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index 75476e77..5afa1e25 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -138,18 +138,15 @@ class AFNILinearTransformArray(BaseLinearTransformList): def to_ras(self, moving=None, reference=None): """Return a nitransforms' internal RAS matrix.""" - pre_rotation = None - if reference is not None: - ref_aff = _ensure_image(reference).affine - pre_rotation = _cardinal_rotation(ref_aff, True) if _is_oblique(ref_aff) else None - - post_rotation = None - if moving is not None: - mov_aff = _ensure_image(moving).affine - post_rotation = _cardinal_rotation(mov_aff, True) if _is_oblique(mov_aff) else None + pre_rotation = post_rotation = np.eye(4) + + if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine): + pre_rotation = _cardinal_rotation(ref_aff, True) + if moving is not 
None and _is_oblique(mov_aff := _ensure_image(moving).affine): + post_rotation = _cardinal_rotation(mov_aff, True) return np.stack([ - xfm.to_ras(pre_rotation=pre_rotation, post_rotation=post_rotation) + post_rotation @ xfm.to_ras() @ pre_rotation for xfm in self.xforms ]) From 4ace2bebb6359f31c79fa27650d76c8e82171354 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 17:52:55 +0100 Subject: [PATCH 013/123] enh: remove unnecessary new arguments Co-authored-by: Chris Markiewicz --- nitransforms/io/afni.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index 5afa1e25..ebe19703 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -108,26 +108,23 @@ def from_string(cls, string): sa["parameters"] = parameters return tf - def to_ras(self, moving=None, reference=None, pre_rotation=None, post_rotation=None): + def to_ras(self, moving=None, reference=None): """Return a nitransforms internal RAS+ matrix.""" # swapaxes is necessary, as axis 0 encodes series of transforms - retval = LPS @ np.swapaxes(self.structarr["parameters"].T, 0, 1) @ LPS - - if pre_rotation is None and reference is not None: - ref_aff = _ensure_image(reference).affine - pre_rotation = _cardinal_rotation(ref_aff, True) if _is_oblique(ref_aff) else None - - if pre_rotation is not None: - retval = retval @ pre_rotation - if post_rotation is None and reference is not None: - mov_aff = _ensure_image(moving).affine - post_rotation = _cardinal_rotation(mov_aff, True) if _is_oblique(mov_aff) else None - - if post_rotation is not None: - retval = post_rotation @ retval + pre_rotation = post_rotation = np.eye(4) + if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine): + pre_rotation = _cardinal_rotation(ref_aff, True) + if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine): + post_rotation = _cardinal_rotation(mov_aff, True) - return retval + return ( + post_rotation + @ LPS + @ np.swapaxes(self.structarr["parameters"].T, 0, 1) + @ LPS + @ pre_rotation + ) class AFNILinearTransformArray(BaseLinearTransformList): @@ -139,7 +136,6 @@ def to_ras(self, moving=None, reference=None): """Return a nitransforms' internal RAS matrix.""" pre_rotation = post_rotation = np.eye(4) - if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine): pre_rotation = _cardinal_rotation(ref_aff, True) if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine): From 3a5467f19ba0e2e3054317c07731127ccbb731b8 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 18:07:29 +0100 Subject: [PATCH 014/123] fix: roll back original implementation --- nitransforms/io/afni.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index ebe19703..1e878563 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -111,20 +111,16 @@ def from_string(cls, string): def to_ras(self, moving=None, reference=None): """Return a nitransforms internal RAS+ matrix.""" # swapaxes is necessary, as axis 0 encodes series of transforms + retval = LPS @ np.swapaxes(self.structarr["parameters"].T, 0, 1) @ LPS + reference = _ensure_image(reference) + if reference is not None and _is_oblique(reference.affine): + retval = retval @ _cardinal_rotation(reference.affine, True) - pre_rotation = post_rotation = np.eye(4) - if reference is not None and 
_is_oblique(ref_aff := _ensure_image(reference).affine): - pre_rotation = _cardinal_rotation(ref_aff, True) - if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine): - post_rotation = _cardinal_rotation(mov_aff, True) + moving = _ensure_image(moving) + if moving is not None and _is_oblique(moving.affine): + retval = _cardinal_rotation(moving.affine, False) @ retval - return ( - post_rotation - @ LPS - @ np.swapaxes(self.structarr["parameters"].T, 0, 1) - @ LPS - @ pre_rotation - ) + return retval class AFNILinearTransformArray(BaseLinearTransformList): From 4c8f0437cda4d9f7224006271fc70d8ddeadcdfc Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 20:13:05 +0100 Subject: [PATCH 015/123] enh: add test to cover new lines --- nitransforms/tests/data/affine-RAS.afni-array | 3 +++ nitransforms/tests/test_io.py | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 nitransforms/tests/data/affine-RAS.afni-array diff --git a/nitransforms/tests/data/affine-RAS.afni-array b/nitransforms/tests/data/affine-RAS.afni-array new file mode 100644 index 00000000..df023e21 --- /dev/null +++ b/nitransforms/tests/data/affine-RAS.afni-array @@ -0,0 +1,3 @@ +# 3dvolreg matrices (DICOM-to-DICOM, row-by-row): +0.999999 -0.000999999 -0.001 -4 0.00140494 0.621609 0.783327 -2 -0.000161717 -0.783327 0.62161 -1 +0.999999 -0.000999999 -0.001 -4 0.00140494 0.621609 0.783327 -2 -0.000161717 -0.783327 0.62161 -1 diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index 2475c946..cb9270d7 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -180,7 +180,7 @@ def test_LT_conversions(data_path, fname): "oblique", ], ) -@pytest.mark.parametrize("sw", ["afni", "fsl", "fs", "itk"]) +@pytest.mark.parametrize("sw", ["afni", "fsl", "fs", "itk", "afni-array"]) def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): tmpdir.chdir() @@ -206,6 +206,9 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): fname = f"affine-{image_orientation}.{sw}{ext}" + if sw == "afni-array": + fname.replace(image_orientation, "RAS") + # Test the transform loaders are implemented xfm = factory.from_filename(data_path / fname) @@ -222,6 +225,9 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): # Test from_ras RAS = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) + if sw == "afni-array": + RAS = [RAS, RAS] + xfm = factory.from_ras(RAS, reference=reference, moving=moving) assert np.allclose(xfm.to_ras(reference=reference, moving=moving), RAS) From 0a02bcf8fb7cc73f765655cebf4d821134ce370a Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 20:17:25 +0100 Subject: [PATCH 016/123] fix: error in new test case implementation --- nitransforms/tests/test_io.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index cb9270d7..982167f5 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -190,6 +190,8 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): ext = "" if sw == "afni": factory = afni.AFNILinearTransform + elif sw == "afni-array": + factory = afni.AFNILinearTransformArray elif sw == "fsl": factory = fsl.FSLLinearTransform elif sw == "itk": From 01cfcbc0b976f8e18e492f5a07d5ec20a6af6da2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 21:17:43 +0100 Subject: [PATCH 017/123] fix: bug in 
oblique adjusting matrices computation --- nitransforms/io/afni.py | 16 ++++++++++++---- nitransforms/tests/data/affine-LAS.afni-array | 1 + nitransforms/tests/data/affine-LPS.afni-array | 1 + .../tests/data/affine-oblique.afni-array | 1 + nitransforms/tests/test_io.py | 5 +---- 5 files changed, 16 insertions(+), 8 deletions(-) create mode 120000 nitransforms/tests/data/affine-LAS.afni-array create mode 120000 nitransforms/tests/data/affine-LPS.afni-array create mode 120000 nitransforms/tests/data/affine-oblique.afni-array diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index 1e878563..b197c4ed 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -135,10 +135,10 @@ def to_ras(self, moving=None, reference=None): if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine): pre_rotation = _cardinal_rotation(ref_aff, True) if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine): - post_rotation = _cardinal_rotation(mov_aff, True) + post_rotation = _cardinal_rotation(mov_aff, False) return np.stack([ - post_rotation @ xfm.to_ras() @ pre_rotation + post_rotation @ (xfm.to_ras() @ pre_rotation) for xfm in self.xforms ]) @@ -152,14 +152,22 @@ def to_string(self): if line.strip() ] strings += lines - return "\n".join(strings) + return "\n".join(strings + [""]) @classmethod def from_ras(cls, ras, moving=None, reference=None): """Create an ITK affine from a nitransform's RAS+ matrix.""" _self = cls() + + pre_rotation = post_rotation = np.eye(4) + + if reference is not None and _is_oblique(ref_aff := _ensure_image(reference).affine): + pre_rotation = _cardinal_rotation(ref_aff, False) + if moving is not None and _is_oblique(mov_aff := _ensure_image(moving).affine): + post_rotation = _cardinal_rotation(mov_aff, True) + _self.xforms = [ - cls._inner_type.from_ras(ras[i, ...], moving=moving, reference=reference) + cls._inner_type.from_ras(post_rotation @ ras[i, ...] 
@ pre_rotation) for i in range(ras.shape[0]) ] return _self diff --git a/nitransforms/tests/data/affine-LAS.afni-array b/nitransforms/tests/data/affine-LAS.afni-array new file mode 120000 index 00000000..27d48851 --- /dev/null +++ b/nitransforms/tests/data/affine-LAS.afni-array @@ -0,0 +1 @@ +affine-RAS.afni-array \ No newline at end of file diff --git a/nitransforms/tests/data/affine-LPS.afni-array b/nitransforms/tests/data/affine-LPS.afni-array new file mode 120000 index 00000000..27d48851 --- /dev/null +++ b/nitransforms/tests/data/affine-LPS.afni-array @@ -0,0 +1 @@ +affine-RAS.afni-array \ No newline at end of file diff --git a/nitransforms/tests/data/affine-oblique.afni-array b/nitransforms/tests/data/affine-oblique.afni-array new file mode 120000 index 00000000..27d48851 --- /dev/null +++ b/nitransforms/tests/data/affine-oblique.afni-array @@ -0,0 +1 @@ +affine-RAS.afni-array \ No newline at end of file diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index 982167f5..bcee9198 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -208,9 +208,6 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): fname = f"affine-{image_orientation}.{sw}{ext}" - if sw == "afni-array": - fname.replace(image_orientation, "RAS") - # Test the transform loaders are implemented xfm = factory.from_filename(data_path / fname) @@ -228,7 +225,7 @@ def test_Linear_common(tmpdir, data_path, sw, image_orientation, get_testdata): # Test from_ras RAS = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) if sw == "afni-array": - RAS = [RAS, RAS] + RAS = np.array([RAS, RAS]) xfm = factory.from_ras(RAS, reference=reference, moving=moving) assert np.allclose(xfm.to_ras(reference=reference, moving=moving), RAS) From db1b250dbbfa928785a2d469f246369e130f3624 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 17:49:46 +0100 Subject: [PATCH 018/123] fix: postpone coordinate mapping on linear array transforms Resolves: #173. --- nitransforms/linear.py | 68 +++++++++++++++++++++++++----------------- 1 file changed, 41 insertions(+), 27 deletions(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index 9c430d3b..0709d50b 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -436,6 +436,7 @@ def apply( The data imaged after resampling to reference space. 
""" + if reference is not None and isinstance(reference, (str, Path)): reference = _nbload(str(reference)) @@ -446,40 +447,53 @@ def apply( if isinstance(spatialimage, (str, Path)): spatialimage = _nbload(str(spatialimage)) - data = np.squeeze(np.asanyarray(spatialimage.dataobj)) - output_dtype = output_dtype or data.dtype + # Avoid opening the data array just yet + input_dtype = spatialimage.header.get_data_dtype() + output_dtype = output_dtype or input_dtype - ycoords = self.map(_ref.ndcoords.T) - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(np.vstack(ycoords), dim=_ref.ndim) - ) + # Prepare physical coordinates of input (grid, points) + xcoords = _ref.ndcoords.astype("f4") - if data.ndim == 4: - if len(self) != data.shape[-1]: + # Invert target's (moving) affine once + ras2vox = ~Affine(spatialimage.affine) + + if spatialimage.ndim == 4: + if len(self) != spatialimage.shape[-1]: raise ValueError( "Attempting to apply %d transforms on a file with " - "%d timepoints" % (len(self), data.shape[-1]) + "%d timepoints" % (len(self), spatialimage.shape[-1]) ) - targets = targets.reshape((len(self), -1, targets.shape[-1])) - resampled = np.stack( - [ - ndi.map_coordinates( - data[..., t], - targets[t, ..., : _ref.ndim].T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - for t in range(data.shape[-1]) - ], - axis=0, + + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (xcoords.T.shape[0], ) + spatialimage.shape[-1:], dtype=output_dtype, order="F" ) - elif data.ndim in (2, 3): + + for t in range(spatialimage.shape[-1]): + # Map the input coordinates on to timepoint t of the target (moving) + ycoords = Affine(self.matrix[t]).map(xcoords.T)[..., : _ref.ndim] + + # Calculate corresponding voxel coordinates + yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] + + # Interpolate + resampled[..., t] = ndi.map_coordinates( + spatialimage.dataobj[..., t].astype(input_dtype, copy=False), + yvoxels.T, + output=output_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + elif spatialimage.ndim in (2, 3): + ycoords = self.map(xcoords.T)[..., : _ref.ndim] + yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] + resampled = ndi.map_coordinates( - data, - targets[..., : _ref.ndim].T, + spatialimage.dataobj.astype(input_dtype, copy=False), + yvoxels.T, output=output_dtype, order=order, mode=mode, From c0e7f2758a0a013d7f5377d25059456365465232 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 21:50:39 +0100 Subject: [PATCH 019/123] Update nitransforms/linear.py --- nitransforms/linear.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index 0709d50b..3995bd28 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -448,7 +448,7 @@ def apply( spatialimage = _nbload(str(spatialimage)) # Avoid opening the data array just yet - input_dtype = spatialimage.header.get_data_dtype() + input_dtype = nb.arrayproxy.get_obj_dtype(spatialimage.dataobj) output_dtype = output_dtype or input_dtype # Prepare physical coordinates of input (grid, points) From 2c36d088e7d8dae658a064211fdd394caf214ed9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 21:53:01 +0100 Subject: [PATCH 020/123] fix: missing import --- nitransforms/linear.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nitransforms/linear.py 
b/nitransforms/linear.py index 3995bd28..dfeca36d 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -14,6 +14,7 @@ from nibabel.loadsave import load as _nbload from nibabel.affines import from_matvec +from nibabel.arrayproxy import get_obj_dtype from nitransforms.base import ( ImageGrid, @@ -448,7 +449,7 @@ def apply( spatialimage = _nbload(str(spatialimage)) # Avoid opening the data array just yet - input_dtype = nb.arrayproxy.get_obj_dtype(spatialimage.dataobj) + input_dtype = get_obj_dtype(spatialimage.dataobj) output_dtype = output_dtype or input_dtype # Prepare physical coordinates of input (grid, points) From 9772710419968ef362e1fb486b29c31ae00cbd1f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 23:15:44 +0100 Subject: [PATCH 021/123] fix: shape and order of resampled array --- nitransforms/linear.py | 68 +++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index dfeca36d..8511124c 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -317,6 +317,11 @@ def __init__(self, transforms, reference=None): ) self._inverse = np.linalg.inv(self._matrix) + def __iter__(self): + """Enable iterating over the series of transforms.""" + for _m in self.matrix: + yield Affine(_m, reference=self._reference) + def __getitem__(self, i): """Enable indexed access to the series of matrices.""" return Affine(self.matrix[i, ...], reference=self._reference) @@ -458,42 +463,37 @@ def apply( # Invert target's (moving) affine once ras2vox = ~Affine(spatialimage.affine) - if spatialimage.ndim == 4: - if len(self) != spatialimage.shape[-1]: - raise ValueError( - "Attempting to apply %d transforms on a file with " - "%d timepoints" % (len(self), spatialimage.shape[-1]) - ) - - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (xcoords.T.shape[0], ) + spatialimage.shape[-1:], dtype=output_dtype, order="F" + if spatialimage.ndim == 4 and (len(self) != spatialimage.shape[-1]): + raise ValueError( + "Attempting to apply %d transforms on a file with " + "%d timepoints" % (len(self), spatialimage.shape[-1]) ) - for t in range(spatialimage.shape[-1]): - # Map the input coordinates on to timepoint t of the target (moving) - ycoords = Affine(self.matrix[t]).map(xcoords.T)[..., : _ref.ndim] - - # Calculate corresponding voxel coordinates - yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] - - # Interpolate - resampled[..., t] = ndi.map_coordinates( - spatialimage.dataobj[..., t].astype(input_dtype, copy=False), - yvoxels.T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - elif spatialimage.ndim in (2, 3): - ycoords = self.map(xcoords.T)[..., : _ref.ndim] + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (xcoords.T.shape[0], len(self)), dtype=output_dtype, order="F" + ) + + dataobj = ( + np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + if spatialimage.ndim in (2, 3) + else None + ) + + for t, xfm_t in enumerate(self): + # Map the input coordinates on to timepoint t of the target (moving) + ycoords = xfm_t.map(xcoords.T)[..., : _ref.ndim] + + # Calculate corresponding voxel coordinates yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] - resampled = ndi.map_coordinates( - spatialimage.dataobj.astype(input_dtype, copy=False), + # 
Interpolate + resampled[..., t] = ndi.map_coordinates( + ( + dataobj if dataobj is not None + else np.asanyarray(spatialimage.dataobj[..., t], dtype=input_dtype) + ), yvoxels.T, output=output_dtype, order=order, @@ -503,9 +503,9 @@ def apply( ) if isinstance(_ref, ImageGrid): # If reference is grid, reshape - newdata = resampled.reshape((len(self), *_ref.shape)) + newdata = resampled.reshape(_ref.shape + (len(self), )) moved = spatialimage.__class__( - np.moveaxis(newdata, 0, -1), _ref.affine, spatialimage.header + newdata, _ref.affine, spatialimage.header ) moved.header.set_data_dtype(output_dtype) return moved From 40e3c13c66054e66c960dacc8d5da101986d574d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 23:18:31 +0100 Subject: [PATCH 022/123] enh: one less iterated operation (transpose) --- nitransforms/linear.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index 8511124c..12409885 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -458,7 +458,7 @@ def apply( output_dtype = output_dtype or input_dtype # Prepare physical coordinates of input (grid, points) - xcoords = _ref.ndcoords.astype("f4") + xcoords = _ref.ndcoords.astype("f4").T # Invert target's (moving) affine once ras2vox = ~Affine(spatialimage.affine) @@ -472,7 +472,7 @@ def apply( # Order F ensures individual volumes are contiguous in memory # Also matches NIfTI, making final save more efficient resampled = np.zeros( - (xcoords.T.shape[0], len(self)), dtype=output_dtype, order="F" + (xcoords.shape[0], len(self)), dtype=output_dtype, order="F" ) dataobj = ( @@ -483,7 +483,7 @@ def apply( for t, xfm_t in enumerate(self): # Map the input coordinates on to timepoint t of the target (moving) - ycoords = xfm_t.map(xcoords.T)[..., : _ref.ndim] + ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] # Calculate corresponding voxel coordinates yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] From a6fe3ebd8e0bc5f1010375e8532d38fb99943086 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 23:26:29 +0100 Subject: [PATCH 023/123] sty: run black --- nitransforms/linear.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index 12409885..fcee7432 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -217,14 +217,13 @@ def from_filename(cls, filename, fmt=None, reference=None, moving=None): is_array = cls != Affine errors = [] for potential_fmt in fmtlist: - if (potential_fmt == "itk" and Path(filename).suffix == ".mat"): + if potential_fmt == "itk" and Path(filename).suffix == ".mat": is_array = False cls = Affine try: struct = get_linear_factory( - potential_fmt, - is_array=is_array + potential_fmt, is_array=is_array ).from_filename(filename) except (TransformFileError, FileNotFoundError) as err: errors.append((potential_fmt, err)) @@ -491,7 +490,8 @@ def apply( # Interpolate resampled[..., t] = ndi.map_coordinates( ( - dataobj if dataobj is not None + dataobj + if dataobj is not None else np.asanyarray(spatialimage.dataobj[..., t], dtype=input_dtype) ), yvoxels.T, @@ -503,10 +503,8 @@ def apply( ) if isinstance(_ref, ImageGrid): # If reference is grid, reshape - newdata = resampled.reshape(_ref.shape + (len(self), )) - moved = spatialimage.__class__( - newdata, _ref.affine, spatialimage.header - ) + newdata = resampled.reshape(_ref.shape + (len(self),)) + moved = spatialimage.__class__(newdata, _ref.affine, spatialimage.header) 
moved.header.set_data_dtype(output_dtype) return moved From d148e85e333f5cb6c737879e9d33fd573c8ae9b4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 Nov 2023 23:55:39 +0100 Subject: [PATCH 024/123] fix: access ``__getitem__`` directly Co-authored-by: Chris Markiewicz --- nitransforms/linear.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/linear.py b/nitransforms/linear.py index fcee7432..eb4a95d7 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -492,7 +492,7 @@ def apply( ( dataobj if dataobj is not None - else np.asanyarray(spatialimage.dataobj[..., t], dtype=input_dtype) + else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) ), yvoxels.T, output=output_dtype, From c77ba42eb2525fc5048e87333afea0567fb3b616 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 15 Dec 2023 13:12:39 -0500 Subject: [PATCH 025/123] FIX: Remove unsafe cast during TransformBase.apply() --- nitransforms/base.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 25fd88e0..68b97f75 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -259,6 +259,15 @@ def apply( slightly blurred if *order > 1*, unless the input is prefiltered, i.e. it is the result of calling the spline filter on the original input. + output_dtype: dtype specifier, optional + The dtype of the returned array or image, if specified. + If ``None``, the default behavior is to use the effective dtype of + the input image. If slope and/or intercept are defined, the effective + dtype is float64, otherwise it is equivalent to the input image's + ``get_data_dtype()`` (on-disk type). + If ``reference`` is defined, then the return value is an image, with + a data array of the effective dtype but with the on-disk dtype set to + the input image's on-disk dtype. 
Returns ------- @@ -279,11 +288,7 @@ def apply( if isinstance(spatialimage, (str, Path)): spatialimage = _nbload(str(spatialimage)) - data = np.asanyarray( - spatialimage.dataobj, - dtype=spatialimage.get_data_dtype() - ) - output_dtype = output_dtype or data.dtype + data = np.asanyarray(spatialimage.dataobj) targets = ImageGrid(spatialimage).index( # data should be an image _as_homogeneous(self.map(_ref.ndcoords.T), dim=_ref.ndim) ) @@ -302,9 +307,9 @@ def apply( hdr = None if _ref.header is not None: hdr = _ref.header.copy() - hdr.set_data_dtype(output_dtype) + hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) moved = spatialimage.__class__( - resampled.reshape(_ref.shape).astype(output_dtype), + resampled.reshape(_ref.shape), _ref.affine, hdr, ) From 2eec8d9f0f37f158a8b7df08031306ff7fdaae10 Mon Sep 17 00:00:00 2001 From: Matteo Visconti di Oleggio Castello Date: Tue, 26 Mar 2024 16:58:19 -0700 Subject: [PATCH 026/123] FIX _is_oblique --- nitransforms/io/afni.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index b197c4ed..16c3c189 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -226,7 +226,7 @@ def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG): True """ - return (obliquity(affine).min() * 180 / pi) > thres + return (obliquity(affine).max() * 180 / pi) > thres def _afni_deobliqued_grid(oblique, shape): From 592f91baa6138d2f6d018d8b1768f59435f7d553 Mon Sep 17 00:00:00 2001 From: Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> Date: Fri, 19 Apr 2024 10:28:28 +0200 Subject: [PATCH 027/123] Merge pull request #197 from jmarabotto:Implement_ndim FIX: Update implementation of ``ndim`` property of transforms (#197) Co-authored-by: Oscar Esteban Co-authored-by: Julien Marabotto --- nitransforms/base.py | 4 ++-- nitransforms/linear.py | 5 +++++ nitransforms/tests/test_base.py | 12 ++++++++++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 68b97f75..96f00edb 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -178,7 +178,7 @@ def __ne__(self, other): class TransformBase: """Abstract image class to represent transforms.""" - __slots__ = ("_reference",) + __slots__ = ("_reference", "_ndim",) def __init__(self, reference=None): """Instantiate a transform.""" @@ -220,7 +220,7 @@ def reference(self, image): @property def ndim(self): """Access the dimensions of the reference space.""" - return self.reference.ndim + raise TypeError("TransformBase has no dimensions") def apply( self, diff --git a/nitransforms/linear.py b/nitransforms/linear.py index eb4a95d7..af14f396 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -143,6 +143,11 @@ def matrix(self): """Access the internal representation of this affine.""" return self._matrix + @property + def ndim(self): + """Access the internal representation of this affine.""" + return self._matrix.ndim + 1 + def map(self, x, inverse=False): r""" Apply :math:`y = f(x)`. 
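The behavior these properties settle on, mirrored by the tests below, boils
down to the following sketch (assuming the hunks above and below are applied):

    import pytest
    import nitransforms.linear as nitl
    from nitransforms.base import TransformBase

    assert nitl.Affine().ndim == 3          # one 4x4 matrix encodes a 3D transform
    assert nitl.LinearTransformsMapping(
        [nitl.Affine(), nitl.Affine()]
    ).ndim == 4                             # a stack of 3D affines is handled as 4D
    with pytest.raises(TypeError):          # the abstract base has no dimensionality
        _ = TransformBase().ndim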
diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 8422ca10..07a7e4ec 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -94,7 +94,8 @@ def _to_hdf5(klass, x5_root): # Test identity transform xfm = TransformBase() xfm.reference = fname - assert xfm.ndim == 3 + with pytest.raises(TypeError): + _ = xfm.ndim moved = xfm.apply(fname, order=0) assert np.all( imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) @@ -103,12 +104,19 @@ def _to_hdf5(klass, x5_root): # Test identity transform - setting reference xfm = TransformBase() xfm.reference = fname - assert xfm.ndim == 3 + with pytest.raises(TypeError): + _ = xfm.ndim moved = xfm.apply(str(fname), reference=fname, order=0) assert np.all( imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) ) + # Test ndim returned by affine + assert nitl.Affine().ndim == 3 + assert nitl.LinearTransformsMapping( + [nitl.Affine(), nitl.Affine()] + ).ndim == 4 + # Test applying to Gifti gii = nb.gifti.GiftiImage( darrays=[ From 17477ea20342866c37a3acc4055e7b7821181675 Mon Sep 17 00:00:00 2001 From: Feilong Ma Date: Wed, 15 May 2024 15:15:35 -0400 Subject: [PATCH 028/123] Skeleton code to save/load transforms using X5 format. --- nitransforms/surface.py | 77 ++++++++++++++++++++++++++++++ nitransforms/tests/test_surface.py | 26 ++++++++++ 2 files changed, 103 insertions(+) create mode 100644 nitransforms/surface.py create mode 100644 nitransforms/tests/test_surface.py diff --git a/nitransforms/surface.py b/nitransforms/surface.py new file mode 100644 index 00000000..1490f490 --- /dev/null +++ b/nitransforms/surface.py @@ -0,0 +1,77 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the NiBabel package for the +# copyright and license terms. +# +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +"""Surface transforms.""" + +import h5py +import scipy.sparse as sparse + +from nitransforms.base import TransformBase + + +class SurfaceTransform(TransformBase): + """Represents transforms between surface spaces.""" + + __slots__ = ("mat",) + + def __init__(self, mat): + """Initialize the transform. + + Parameters + ---------- + mat : array-like, shape (nv1, nv2) + Sparse matrix representing the transform. + """ + super().__init__() + if isinstance(mat, sparse.csr_array): + self.mat = mat + else: + self.mat = sparse.csr_array(mat) + + def apply(self, x, inverse=False): + """Apply the transform to surface data. + + Parameters + ---------- + x : array-like, shape (..., nv1) + Data to transform. + inverse : bool, default=False + Whether to apply the inverse transform. If True, ``x`` has shape + (..., nv2), and the output will have shape (..., nv1). + + Returns + ------- + y : array-like, shape (..., nv2) + Transformed data. 
+ """ + if inverse: + return x @ self.mat.T + return x @ self.mat + + def _to_hdf5(self, x5_root): + """Write transform to HDF5 file.""" + xform = x5_root.create_group("Transform") + xform.attrs["Type"] = "surface" + xform.create_dataset("data", data=self.mat.data) + xform.create_dataset("indices", data=self.mat.indices) + xform.create_dataset("indptr", data=self.mat.indptr) + xform.create_dataset("shape", data=self.mat.shape) + + @classmethod + def from_filename(cls, filename, fmt="X5"): + """Load transform from file.""" + if fmt != "X5": + raise ValueError("Only X5 format is supported.") + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + mat = sparse.csr_matrix( + (xform["data"][()], xform["indices"][()], xform["indptr"][()]), + shape=xform["shape"][()], + ) + return cls(mat) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py new file mode 100644 index 00000000..31e8e85c --- /dev/null +++ b/nitransforms/tests/test_surface.py @@ -0,0 +1,26 @@ +import os +import tempfile + +import numpy as np +import scipy.sparse as sparse + +from nitransforms.surface import SurfaceTransform + + +def test_surface_transform(): + mat = sparse.random(10, 10, density=0.5) + xfm = SurfaceTransform(mat) + fn = tempfile.mktemp(suffix=".h5") + print(fn) + xfm.to_filename(fn) + + xfm2 = SurfaceTransform.from_filename(fn) + try: + assert xfm.mat.shape == xfm2.mat.shape + np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) + np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) + np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) + except Exception: + os.remove(fn) + raise + os.remove(fn) From 00af8d99304e021fcd42f117c13a16f3120b3dd9 Mon Sep 17 00:00:00 2001 From: Feilong Ma Date: Wed, 15 May 2024 15:22:05 -0400 Subject: [PATCH 029/123] Support I/O of npz format. 
--- nitransforms/surface.py | 31 +++++++++++++++++++++++++++--- nitransforms/tests/test_surface.py | 21 +++++++++++++++++++- 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 1490f490..a0c436af 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -9,6 +9,7 @@ """Surface transforms.""" import h5py +import numpy as np import scipy.sparse as sparse from nitransforms.base import TransformBase @@ -62,11 +63,35 @@ def _to_hdf5(self, x5_root): xform.create_dataset("indptr", data=self.mat.indptr) xform.create_dataset("shape", data=self.mat.shape) + def to_filename(self, filename, fmt=None): + """Store the transform.""" + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + sparse.save_npz(filename, self.mat) + return filename + + with h5py.File(filename, "w") as out_file: + out_file.attrs["Format"] = "X5" + out_file.attrs["Version"] = np.uint16(1) + root = out_file.create_group("/0") + self._to_hdf5(root) + + return filename + @classmethod - def from_filename(cls, filename, fmt="X5"): + def from_filename(cls, filename, fmt=None): """Load transform from file.""" + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + return cls(sparse.load_npz(filename)) + if fmt != "X5": - raise ValueError("Only X5 format is supported.") + raise ValueError("Only npz and X5 formats are supported.") + with h5py.File(filename, "r") as f: assert f.attrs["Format"] == "X5" xform = f["/0/Transform"] @@ -74,4 +99,4 @@ def from_filename(cls, filename, fmt="X5"): (xform["data"][()], xform["indices"][()], xform["indptr"][()]), shape=xform["shape"][()], ) - return cls(mat) + return cls(mat) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 31e8e85c..418f9d7f 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -7,7 +7,7 @@ from nitransforms.surface import SurfaceTransform -def test_surface_transform(): +def test_surface_transform_x5(): mat = sparse.random(10, 10, density=0.5) xfm = SurfaceTransform(mat) fn = tempfile.mktemp(suffix=".h5") @@ -24,3 +24,22 @@ def test_surface_transform(): os.remove(fn) raise os.remove(fn) + + +def test_surface_transform_npz(): + mat = sparse.random(10, 10, density=0.5) + xfm = SurfaceTransform(mat) + fn = tempfile.mktemp(suffix=".npz") + print(fn) + xfm.to_filename(fn) + + xfm2 = SurfaceTransform.from_filename(fn) + try: + assert xfm.mat.shape == xfm2.mat.shape + np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) + np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) + np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) + except Exception: + os.remove(fn) + raise + os.remove(fn) From 91f7ae20e0dd7065126dbeccec398bf488f97360 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 08:12:17 -0400 Subject: [PATCH 030/123] enh: define ndim on nonlinear transforms --- nitransforms/nonlinear.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 8cfb4a62..79c3aa45 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -98,6 +98,11 @@ def __repr__(self): """Beautify the python representation.""" return f"<{self.__class__.__name__}[{self._field.shape[-1]}D] {self._field.shape[:3]}>" + @property + def ndim(self): + """Get the dimensions of the transform.""" + return self._field.ndim - 1 + def map(self, x, inverse=False): r""" Apply the 
transformation to a list of physical coordinate points. @@ -257,6 +262,11 @@ def __init__(self, coefficients, reference=None, order=3): 'Number of components of the coefficients does ' 'not match the number of dimensions') + @property + def ndim(self): + """Get the dimensions of the transform.""" + return self._coeffs.ndim - 1 + def to_field(self, reference=None, dtype="float32"): """Generate a displacements deformation field from this B-Spline field.""" _ref = ( From e38c16a461d44dff932a732072cd2548693676ce Mon Sep 17 00:00:00 2001 From: Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> Date: Fri, 5 Apr 2024 10:00:33 +0200 Subject: [PATCH 031/123] enh: outsource the apply function --- nitransforms/base.py | 95 ------------------------------- nitransforms/resampling.py | 114 +++++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 95 deletions(-) create mode 100644 nitransforms/resampling.py diff --git a/nitransforms/base.py b/nitransforms/base.py index 96f00edb..b9704340 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -222,101 +222,6 @@ def ndim(self): """Access the dimensions of the reference space.""" raise TypeError("TransformBase has no dimensions") - def apply( - self, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """ - Apply a transformation to an image, resampling on the reference spatial object. - - Parameters - ---------- - spatialimage : `spatialimage` - The image object containing the data to be resampled in reference - space - reference : spatial object, optional - The image, surface, or combination thereof containing the coordinates - of samples that will be sampled. - order : int, optional - The order of the spline interpolation, default is 3. - The order has to be in the range 0-5. - mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional - Determines how the input image is extended when the resamplings overflows - a border. Default is 'constant'. - cval : float, optional - Constant value for ``mode='constant'``. Default is 0.0. - prefilter: bool, optional - Determines if the image's data array is prefiltered with - a spline filter before interpolation. The default is ``True``, - which will create a temporary *float64* array of filtered values - if *order > 1*. If setting this to ``False``, the output will be - slightly blurred if *order > 1*, unless the input is prefiltered, - i.e. it is the result of calling the spline filter on the original - input. - output_dtype: dtype specifier, optional - The dtype of the returned array or image, if specified. - If ``None``, the default behavior is to use the effective dtype of - the input image. If slope and/or intercept are defined, the effective - dtype is float64, otherwise it is equivalent to the input image's - ``get_data_dtype()`` (on-disk type). - If ``reference`` is defined, then the return value is an image, with - a data array of the effective dtype but with the on-disk dtype set to - the input image's on-disk dtype. - - Returns - ------- - resampled : `spatialimage` or ndarray - The data imaged after resampling to reference space. 
- - """ - if reference is not None and isinstance(reference, (str, Path)): - reference = _nbload(str(reference)) - - _ref = ( - self.reference if reference is None else SpatialReference.factory(reference) - ) - - if _ref is None: - raise TransformError("Cannot apply transform without reference") - - if isinstance(spatialimage, (str, Path)): - spatialimage = _nbload(str(spatialimage)) - - data = np.asanyarray(spatialimage.dataobj) - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(self.map(_ref.ndcoords.T), dim=_ref.ndim) - ) - - resampled = ndi.map_coordinates( - data, - targets.T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - - if isinstance(_ref, ImageGrid): # If reference is grid, reshape - hdr = None - if _ref.header is not None: - hdr = _ref.header.copy() - hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) - moved = spatialimage.__class__( - resampled.reshape(_ref.shape), - _ref.affine, - hdr, - ) - return moved - - return resampled - def map(self, x, inverse=False): r""" Apply :math:`y = f(x)`. diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py new file mode 100644 index 00000000..7d2765ac --- /dev/null +++ b/nitransforms/resampling.py @@ -0,0 +1,114 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the NiBabel package for the +# copyright and license terms. +# +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +"""Resampling utilities.""" +from pathlib import Path +import numpy as np +import h5py +import warnings +from nibabel.loadsave import load as _nbload +from nibabel import funcs as _nbfuncs +from nibabel.nifti1 import intent_codes as INTENT_CODES +from nibabel.cifti2 import Cifti2Image +from scipy import ndimage as ndi + + +def apply( + transform, + spatialimage, + reference=None, + order=3, + mode="constant", + cval=0.0, + prefilter=True, + output_dtype=None, +): + """ + Apply a transformation to an image, resampling on the reference spatial object. + + Parameters + ---------- + spatialimage : `spatialimage` + The image object containing the data to be resampled in reference + space + reference : spatial object, optional + The image, surface, or combination thereof containing the coordinates + of samples that will be sampled. + order : int, optional + The order of the spline interpolation, default is 3. + The order has to be in the range 0-5. + mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional + Determines how the input image is extended when the resamplings overflows + a border. Default is 'constant'. + cval : float, optional + Constant value for ``mode='constant'``. Default is 0.0. + prefilter: bool, optional + Determines if the image's data array is prefiltered with + a spline filter before interpolation. The default is ``True``, + which will create a temporary *float64* array of filtered values + if *order > 1*. If setting this to ``False``, the output will be + slightly blurred if *order > 1*, unless the input is prefiltered, + i.e. it is the result of calling the spline filter on the original + input. + output_dtype: dtype specifier, optional + The dtype of the returned array or image, if specified. + If ``None``, the default behavior is to use the effective dtype of + the input image. 
If slope and/or intercept are defined, the effective + dtype is float64, otherwise it is equivalent to the input image's + ``get_data_dtype()`` (on-disk type). + If ``reference`` is defined, then the return value is an image, with + a data array of the effective dtype but with the on-disk dtype set to + the input image's on-disk dtype. + + Returns + ------- + resampled : `spatialimage` or ndarray + The data imaged after resampling to reference space. + + """ + if reference is not None and isinstance(reference, (str, Path)): + reference = _nbload(str(reference)) + + _ref = ( + transform.reference if reference is None else SpatialReference.factory(reference) + ) + + if _ref is None: + raise TransformError("Cannot apply transform without reference") + + if isinstance(spatialimage, (str, Path)): + spatialimage = _nbload(str(spatialimage)) + + data = np.asanyarray(spatialimage.dataobj) + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + ) + + resampled = ndi.map_coordinates( + data, + targets.T, + output=output_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + + if isinstance(_ref, ImageGrid): # If reference is grid, reshape + hdr = None + if _ref.header is not None: + hdr = _ref.header.copy() + hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) + moved = spatialimage.__class__( + resampled.reshape(_ref.shape), + _ref.affine, + hdr, + ) + return moved + + return resampled From f396f94daa6bd542104d49dadb6e744f7f22b322 Mon Sep 17 00:00:00 2001 From: Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> Date: Fri, 5 Apr 2024 10:06:05 +0200 Subject: [PATCH 032/123] sty: pacify flake8 --- nitransforms/resampling.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 7d2765ac..a876bb12 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -9,14 +9,14 @@ """Resampling utilities.""" from pathlib import Path import numpy as np -import h5py -import warnings from nibabel.loadsave import load as _nbload -from nibabel import funcs as _nbfuncs -from nibabel.nifti1 import intent_codes as INTENT_CODES -from nibabel.cifti2 import Cifti2Image -from scipy import ndimage as ndi +from nitransforms.base import ( + ImageGrid, + TransformError, + SpatialReference, + _as_homogeneous, +) def apply( transform, From a150da488b48e038c898d0392d5d8bc33633b342 Mon Sep 17 00:00:00 2001 From: Julien Marabotto <166002186+jmarabotto@users.noreply.github.com> Date: Fri, 5 Apr 2024 10:09:33 +0200 Subject: [PATCH 033/123] sty: fix imports --- nitransforms/base.py | 1 - nitransforms/resampling.py | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index b9704340..e7f67e60 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -15,7 +15,6 @@ from nibabel import funcs as _nbfuncs from nibabel.nifti1 import intent_codes as INTENT_CODES from nibabel.cifti2 import Cifti2Image -from scipy import ndimage as ndi EQUALITY_TOL = 1e-5 diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index a876bb12..e89b081a 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -10,6 +10,7 @@ from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload +from scipy import ndimage as ndi from nitransforms.base import ( ImageGrid, @@ -18,6 +19,7 @@ _as_homogeneous, ) + def apply( 
transform, spatialimage, From 3ff7407c8603f64c6a364115ad94aa40a2097fbc Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 5 Apr 2024 10:35:24 +0200 Subject: [PATCH 034/123] fix: update many apply() calls --- nitransforms/cli.py | 4 ++- nitransforms/tests/test_base.py | 9 ++++--- nitransforms/tests/test_io.py | 41 ++++++++++++++++++++----------- nitransforms/tests/test_linear.py | 16 ++++++------ 4 files changed, 45 insertions(+), 25 deletions(-) diff --git a/nitransforms/cli.py b/nitransforms/cli.py index 63b8bed4..8f8f5ce0 100644 --- a/nitransforms/cli.py +++ b/nitransforms/cli.py @@ -5,6 +5,7 @@ from .linear import load as linload from .nonlinear import load as nlinload +from .resampling import apply def cli_apply(pargs): @@ -38,7 +39,8 @@ def cli_apply(pargs): # ensure a reference is set xfm.reference = pargs.ref or pargs.moving - moved = xfm.apply( + moved = apply( + xfm, pargs.moving, order=pargs.order, mode=pargs.mode, diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 07a7e4ec..a1402baf 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -6,6 +6,7 @@ from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase from .. import linear as nitl +from ..resampling import apply def test_SpatialReference(testdata_path): @@ -94,9 +95,10 @@ def _to_hdf5(klass, x5_root): # Test identity transform xfm = TransformBase() xfm.reference = fname + with pytest.raises(TypeError): _ = xfm.ndim - moved = xfm.apply(fname, order=0) + moved = apply(xfm, fname, order=0) assert np.all( imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) ) @@ -104,9 +106,10 @@ def _to_hdf5(klass, x5_root): # Test identity transform - setting reference xfm = TransformBase() xfm.reference = fname + with pytest.raises(TypeError): _ = xfm.ndim - moved = xfm.apply(str(fname), reference=fname, order=0) + moved = apply(xfm, str(fname), reference=fname, order=0) assert np.all( imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) ) @@ -126,7 +129,7 @@ def _to_hdf5(klass, x5_root): ) ] ) - giimoved = xfm.apply(fname, reference=gii, order=0) + giimoved = apply(xfm, fname, reference=gii, order=0) assert np.allclose(giimoved.reshape(xfm.reference.shape), moved.get_fdata()) # Test to_filename diff --git a/nitransforms/tests/test_io.py b/nitransforms/tests/test_io.py index bcee9198..0cc79d15 100644 --- a/nitransforms/tests/test_io.py +++ b/nitransforms/tests/test_io.py @@ -28,6 +28,8 @@ ) from nitransforms.io.base import LinearParameters, TransformIOError, TransformFileError from nitransforms.conftest import _datadir, _testdir +from nitransforms.resampling import apply + LPS = np.diag([-1, -1, 1, 1]) ITK_MAT = LPS.dot(np.ones((4, 4)).dot(LPS)) @@ -497,10 +499,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.allclose(card_aff, nb.load("deob_3drefit.nii.gz").affine) # Check that nitransforms can emulate 3drefit -deoblique - nt3drefit = Affine( - afni._cardinal_rotation(img.affine, False), - reference="deob_3drefit.nii.gz", - ).apply("orig.nii.gz") + nt3drefit = apply( + Affine( + afni._cardinal_rotation(img.affine, False), + reference="deob_3drefit.nii.gz", + ), + "orig.nii.gz", + ) diff = ( np.asanyarray(img.dataobj, dtype="uint8") @@ -509,10 +514,13 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.sqrt((diff[10:-10, 10:-10, 10:-10] ** 2).mean()) < 0.1 # Check that nitransforms can revert 3drefit -deoblique - nt_undo3drefit 
= Affine( - afni._cardinal_rotation(img.affine, True), - reference="orig.nii.gz", - ).apply("deob_3drefit.nii.gz") + nt_undo3drefit = apply( + Affine( + afni._cardinal_rotation(img.affine, True), + reference="orig.nii.gz", + ), + "deob_3drefit.nii.gz", + ) diff = ( np.asanyarray(img.dataobj, dtype="uint8") @@ -531,16 +539,21 @@ def test_afni_oblique(tmpdir, parameters, swapaxes, testdata_path, dir_x, dir_y, assert np.allclose(deobaff, deobnii.affine) # Check resampling in deobliqued grid - ntdeobnii = Affine(np.eye(4), reference=deobnii.__class__( - np.zeros(deobshape, dtype="uint8"), - deobaff, - deobnii.header - )).apply(img, order=0) + ntdeobnii = apply( + Affine(np.eye(4), reference=deobnii.__class__( + np.zeros(deobshape, dtype="uint8"), + deobaff, + deobnii.header + )), + img, + order=0, + ) # Generate an internal box to exclude border effects box = np.zeros(img.shape, dtype="uint8") box[10:-10, 10:-10, 10:-10] = 1 - ntdeobmask = Affine(np.eye(4), reference=ntdeobnii).apply( + ntdeobmask = apply( + Affine(np.eye(4), reference=ntdeobnii), nb.Nifti1Image(box, img.affine, img.header), order=0, ) diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index 2957f59c..9a06fe32 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -13,6 +13,7 @@ from nibabel.affines import from_matvec from nitransforms import linear as nitl from nitransforms import io +from nitransforms.resampling import apply from .utils import assert_affines_by_filename RMSE_TOL = 0.1 @@ -285,7 +286,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient assert exit_code == 0 sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - nt_moved_mask = xfm.apply(msk, order=0) + nt_moved_mask = apply(xfm, msk, order=0) nt_moved_mask.set_data_dtype(msk.get_data_dtype()) nt_moved_mask.to_filename("ntmask.nii.gz") diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) @@ -305,7 +306,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient sw_moved = nb.load("resampled.nii.gz") sw_moved.set_data_dtype(img.get_data_dtype()) - nt_moved = xfm.apply(img, order=0) + nt_moved = apply(xfm, img, order=0) diff = ( np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) @@ -314,7 +315,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient # A certain tolerance is necessary because of resampling at borders assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - nt_moved = xfm.apply("img.nii.gz", order=0) + nt_moved = apply(xfm, "img.nii.gz", order=0) diff = ( np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) @@ -343,8 +344,8 @@ def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): assert isinstance(hmc, nitl.LinearTransformsMapping) # Test-case: realign functional data on to sbref - nii = hmc.apply( - testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz" + nii = apply( + hmc, testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz" ) assert nii.dataobj.shape[-1] == len(hmc) @@ -352,13 +353,14 @@ def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): hmcinv = nitl.LinearTransformsMapping( np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" ) - nii = hmcinv.apply(testdata_path / "fmap.nii.gz", order=1) 
+ nii = apply(hmcinv, testdata_path / "fmap.nii.gz", order=1) assert nii.dataobj.shape[-1] == len(hmc) # Ensure a ValueError is issued when trying to do weird stuff hmc = nitl.LinearTransformsMapping(hmc.matrix[:1, ...]) with pytest.raises(ValueError): - hmc.apply( + apply( + hmc, testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz", From cc3b21e205f027e0ab7d7f360de397befbab4298 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Wed, 17 Apr 2024 11:18:02 +0200 Subject: [PATCH 035/123] Updated outsource Apply outsourced Apply(); fixed resampled.py (line 101); implemented np.tensordor to _apply_affine() in main.py (line 287). Left to do: fix test_linear RunTime error (line 358) --- nitransforms/base.py | 8 ++++++-- nitransforms/resampling.py | 15 ++++++++++++--- nitransforms/tests/test_linear.py | 6 +++++- 3 files changed, 23 insertions(+), 6 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index e7f67e60..99a0ee96 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -283,7 +283,11 @@ def _as_homogeneous(xyz, dtype="float32", dim=3): return np.hstack((xyz, np.ones((xyz.shape[0], 1), dtype=dtype))) - +#import pdb; pdb.set_trace() def _apply_affine(x, affine, dim): """Get the image array's indexes corresponding to coordinates.""" - return affine.dot(_as_homogeneous(x, dim=dim).T)[:dim, ...].T + return np.tensordot( + affine, + _as_homogeneous(x, dim=dim).T, + axes=1, + )[:dim, ...] diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index e89b081a..f621f7a3 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -87,27 +87,36 @@ def apply( spatialimage = _nbload(str(spatialimage)) data = np.asanyarray(spatialimage.dataobj) + targets = ImageGrid(spatialimage).index( # data should be an image _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) ) + if data.ndim < targets.shape[-1]: + data = data[..., np.newaxis] + + import pdb; pdb.set_trace() + + #import pdb; pdb.set_trace() resampled = ndi.map_coordinates( data, - targets.T, + #targets.T, + #Reshape targets (516096, 3, 8) --> (4, 4128768) : + _as_homogeneous(targets.reshape(-2, targets.shape[0])).T, output=output_dtype, order=order, mode=mode, cval=cval, prefilter=prefilter, ) - + if isinstance(_ref, ImageGrid): # If reference is grid, reshape hdr = None if _ref.header is not None: hdr = _ref.header.copy() hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) moved = spatialimage.__class__( - resampled.reshape(_ref.shape), + resampled.reshape(_ref.shape if data.ndim < 4 else _ref.shape + (-1, )), _ref.affine, hdr, ) diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index 9a06fe32..b7f7384a 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -353,7 +353,11 @@ def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): hmcinv = nitl.LinearTransformsMapping( np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" ) - nii = apply(hmcinv, testdata_path / "fmap.nii.gz", order=1) + + import pdb; pdb.set_trace() + nii = apply( + hmcinv, testdata_path / "fmap.nii.gz", order=1 + ) assert nii.dataobj.shape[-1] == len(hmc) # Ensure a ValueError is issued when trying to do weird stuff From f16b737dfc7c3b63c722e1af772deacdbf59abff Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Wed, 17 Apr 2024 11:26:00 +0200 Subject: [PATCH 036/123] Updated changes - see previous commit for details --- nitransforms/resampling.py | 8 +++----- 1 file 
changed, 3 insertions(+), 5 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index f621f7a3..88f7e6b8 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -92,11 +92,9 @@ def apply( _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) ) - if data.ndim < targets.shape[-1]: - data = data[..., np.newaxis] - - import pdb; pdb.set_trace() - + #if data.ndim < targets.shape[-1]: + # data = data[..., np.newaxis] + #import pdb; pdb.set_trace() resampled = ndi.map_coordinates( data, From be7e9a9449944b7e3f9b9ad50e7ccb084a070767 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Mon, 22 Apr 2024 10:44:26 +0200 Subject: [PATCH 037/123] FIX: Outsource Apply Outsourced apply, test_linear.py successful --- nitransforms/base.py | 1 - nitransforms/linear.py | 121 +----------------------------- nitransforms/resampling.py | 10 +-- nitransforms/tests/test_linear.py | 1 - 4 files changed, 9 insertions(+), 124 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 99a0ee96..6a6ae7ed 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -283,7 +283,6 @@ def _as_homogeneous(xyz, dtype="float32", dim=3): return np.hstack((xyz, np.ones((xyz.shape[0], 1), dtype=dtype))) -#import pdb; pdb.set_trace() def _apply_affine(x, affine, dim): """Get the image array's indexes corresponding to coordinates.""" return np.tensordot( diff --git a/nitransforms/linear.py b/nitransforms/linear.py index af14f396..cf16104e 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -112,6 +112,10 @@ def __invert__(self): """ return self.__class__(self._inverse) + + def __len__(self): + """Enable using len().""" + return 1 if self._matrix.ndim == 2 else len(self._matrix) def __matmul__(self, b): """ @@ -330,10 +334,6 @@ def __getitem__(self, i): """Enable indexed access to the series of matrices.""" return Affine(self.matrix[i, ...], reference=self._reference) - def __len__(self): - """Enable using len().""" - return len(self._matrix) - def map(self, x, inverse=False): r""" Apply :math:`y = f(x)`. @@ -402,119 +402,6 @@ def to_filename(self, filename, fmt="X5", moving=None): ).to_filename(filename) return filename - def apply( - self, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """ - Apply a transformation to an image, resampling on the reference spatial object. - - Parameters - ---------- - spatialimage : `spatialimage` - The image object containing the data to be resampled in reference - space - reference : spatial object, optional - The image, surface, or combination thereof containing the coordinates - of samples that will be sampled. - order : int, optional - The order of the spline interpolation, default is 3. - The order has to be in the range 0-5. - mode : {"constant", "reflect", "nearest", "mirror", "wrap"}, optional - Determines how the input image is extended when the resamplings overflows - a border. Default is "constant". - cval : float, optional - Constant value for ``mode="constant"``. Default is 0.0. - prefilter: bool, optional - Determines if the image's data array is prefiltered with - a spline filter before interpolation. The default is ``True``, - which will create a temporary *float64* array of filtered values - if *order > 1*. If setting this to ``False``, the output will be - slightly blurred if *order > 1*, unless the input is prefiltered, - i.e. it is the result of calling the spline filter on the original - input. 
- - Returns - ------- - resampled : `spatialimage` or ndarray - The data imaged after resampling to reference space. - - """ - - if reference is not None and isinstance(reference, (str, Path)): - reference = _nbload(str(reference)) - - _ref = ( - self.reference if reference is None else SpatialReference.factory(reference) - ) - - if isinstance(spatialimage, (str, Path)): - spatialimage = _nbload(str(spatialimage)) - - # Avoid opening the data array just yet - input_dtype = get_obj_dtype(spatialimage.dataobj) - output_dtype = output_dtype or input_dtype - - # Prepare physical coordinates of input (grid, points) - xcoords = _ref.ndcoords.astype("f4").T - - # Invert target's (moving) affine once - ras2vox = ~Affine(spatialimage.affine) - - if spatialimage.ndim == 4 and (len(self) != spatialimage.shape[-1]): - raise ValueError( - "Attempting to apply %d transforms on a file with " - "%d timepoints" % (len(self), spatialimage.shape[-1]) - ) - - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (xcoords.shape[0], len(self)), dtype=output_dtype, order="F" - ) - - dataobj = ( - np.asanyarray(spatialimage.dataobj, dtype=input_dtype) - if spatialimage.ndim in (2, 3) - else None - ) - - for t, xfm_t in enumerate(self): - # Map the input coordinates on to timepoint t of the target (moving) - ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] - - # Calculate corresponding voxel coordinates - yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] - - # Interpolate - resampled[..., t] = ndi.map_coordinates( - ( - dataobj - if dataobj is not None - else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) - ), - yvoxels.T, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - - if isinstance(_ref, ImageGrid): # If reference is grid, reshape - newdata = resampled.reshape(_ref.shape + (len(self),)) - moved = spatialimage.__class__(newdata, _ref.affine, spatialimage.header) - moved.header.set_data_dtype(output_dtype) - return moved - - return resampled - def load(filename, fmt=None, reference=None, moving=None): """ diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 88f7e6b8..1a1e2239 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -92,14 +92,14 @@ def apply( _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) ) - #if data.ndim < targets.shape[-1]: - # data = data[..., np.newaxis] + if data.ndim == 4 and data.shape[-1] != len(transform): + raise ValueError("The fourth dimension of the data does not match the tranform's shape.") + + if data.ndim < transform.ndim: + data = data[..., np.newaxis] - #import pdb; pdb.set_trace() resampled = ndi.map_coordinates( data, - #targets.T, - #Reshape targets (516096, 3, 8) --> (4, 4128768) : _as_homogeneous(targets.reshape(-2, targets.shape[0])).T, output=output_dtype, order=order, diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index b7f7384a..50cc5371 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -354,7 +354,6 @@ def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" ) - import pdb; pdb.set_trace() nii = apply( hmcinv, testdata_path / "fmap.nii.gz", order=1 ) From c5b86e1d5bae736d75b6f0cf486268d161801b50 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Mon, 22 Apr 2024 14:50:33 +0200 Subject: [PATCH 038/123] 
ENH: update outsource apply

---
 nitransforms/resampling.py      |  1 +
 nitransforms/tests/test_base.py | 17 ++++++++++-------
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py
index 1a1e2239..fa02c2e1 100644
--- a/nitransforms/resampling.py
+++ b/nitransforms/resampling.py
@@ -98,6 +98,7 @@ def apply(
     if data.ndim < transform.ndim:
         data = data[..., np.newaxis]
 
+    import pdb; pdb.set_trace()
     resampled = ndi.map_coordinates(
         data,
         _as_homogeneous(targets.reshape(-2, targets.shape[0])).T,
diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py
index a1402baf..fe7855a0 100644
--- a/nitransforms/tests/test_base.py
+++ b/nitransforms/tests/test_base.py
@@ -4,7 +4,7 @@
 import pytest
 import h5py
 
-from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase
+from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase, _as_homogeneous
 from .. import linear as nitl
 from ..resampling import apply
@@ -42,11 +42,13 @@ def test_ImageGrid(get_testdata, image_orientation):
     # Test ras2vox and vox2ras conversions
     ijk = [[10, 10, 10], [40, 4, 20], [0, 0, 0], [s - 1 for s in im.shape[:3]]]
     xyz = [img._affine.dot(idx + [1])[:-1] for idx in ijk]
+    # xyz = np.array([np.tensordot(img._affine, idx + [1], axes=1)[:-1] for idx in ijk])
 
-    assert np.allclose(img.ras(ijk[0]), xyz[0])
+    # import pdb; pdb.set_trace()
+    assert np.allclose(np.squeeze(img.ras(ijk[0])), xyz[0])
     assert np.allclose(np.round(img.index(xyz[0])), ijk[0])
-    assert np.allclose(img.ras(ijk), xyz)
-    assert np.allclose(np.round(img.index(xyz)), ijk)
+    assert np.allclose(img.ras(ijk).T, xyz)
+    assert np.allclose(np.round(img.index(xyz)).T, ijk)
 
     # nd index / coords
     idxs = img.ndindex
@@ -92,12 +94,13 @@ def _to_hdf5(klass, x5_root):
     img = nb.load(fname)
     imgdata = np.asanyarray(img.dataobj, dtype=img.get_data_dtype())
 
-    # Test identity transform
     xfm = TransformBase()
-    xfm.reference = fname
-
     with pytest.raises(TypeError):
         _ = xfm.ndim
+
+    # Test identity transform
+    xfm = nitl.Affine()
+    xfm.reference = fname
     moved = apply(xfm, fname, order=0)
     assert np.all(
         imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype())
     )

From 95a215edf5e3e93a8a02669900c0167c34034f37 Mon Sep 17 00:00:00 2001
From: Julien Marabotto
Date: Thu, 25 Apr 2024 15:33:41 +0200
Subject: [PATCH 039/123] Updated: offsource apply

---
 nitransforms/nonlinear.py            | 13 +++++++++----
 nitransforms/resampling.py           |  6 ++++--
 nitransforms/tests/test_base.py      | 17 +++++------------
 nitransforms/tests/test_nonlinear.py |  5 +++--
 4 files changed, 21 insertions(+), 20 deletions(-)

diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py
index 79c3aa45..52d854e2 100644
--- a/nitransforms/nonlinear.py
+++ b/nitransforms/nonlinear.py
@@ -14,6 +14,7 @@ from nitransforms import io
 from nitransforms.io.base import _ensure_image
 from nitransforms.interp.bspline import grid_bspline_weights, _cubic_bspline
+from nitransforms.resampling import apply
 from nitransforms.base import (
     TransformBase,
     TransformError,
     ImageGrid,
@@ -257,7 +258,7 @@ def __init__(self, coefficients, reference=None, order=3):
         if reference is not None:
             self.reference = reference
 
-        if coefficients.shape[-1] != self.ndim:
+        if coefficients.shape[-1] != self.reference.ndim:
             raise TransformError(
                 'Number of components of the coefficients does '
                 'not match the number of dimensions')
@@ -310,19 +311,23 @@ def apply(
         spatialimage = _ensure_image(spatialimage)
 
         # If locations to be interpolated are not on a grid,
run map() + #import pdb; pdb.set_trace() if not isinstance(_ref, ImageGrid): - return super().apply( + return apply( + super(), spatialimage, reference=_ref, + output_dtype=output_dtype, order=order, mode=mode, cval=cval, prefilter=prefilter, - output_dtype=output_dtype, + ) # If locations to be interpolated are on a grid, generate a displacements field - return self.to_field(reference=reference).apply( + return apply( + self.to_field(reference=reference), spatialimage, reference=reference, order=order, diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index fa02c2e1..7cbdd9b8 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -98,10 +98,12 @@ def apply( if data.ndim < transform.ndim: data = data[..., np.newaxis] - import pdb; pdb.set_trace() + if transform.ndim == 4: + targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T + resampled = ndi.map_coordinates( data, - _as_homogeneous(targets.reshape(-2, targets.shape[0])).T, + targets, output=output_dtype, order=order, mode=mode, diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index fe7855a0..fe08d2e9 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -94,10 +94,14 @@ def _to_hdf5(klass, x5_root): img = nb.load(fname) imgdata = np.asanyarray(img.dataobj, dtype=img.get_data_dtype()) + # Test identity transform - setting reference xfm = TransformBase() with pytest.raises(TypeError): _ = xfm.ndim + # Test to_filename + xfm.to_filename("data.x5") + # Test identity transform xfm = nitl.Affine() xfm.reference = fname @@ -106,17 +110,6 @@ def _to_hdf5(klass, x5_root): imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) ) - # Test identity transform - setting reference - xfm = TransformBase() - xfm.reference = fname - - with pytest.raises(TypeError): - _ = xfm.ndim - moved = apply(xfm, str(fname), reference=fname, order=0) - assert np.all( - imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) - ) - # Test ndim returned by affine assert nitl.Affine().ndim == 3 assert nitl.LinearTransformsMapping( @@ -136,7 +129,7 @@ def _to_hdf5(klass, x5_root): assert np.allclose(giimoved.reshape(xfm.reference.shape), moved.get_fdata()) # Test to_filename - xfm.to_filename("data.x5") + xfm.to_filename("data.xfm", fmt='itk') def test_SampledSpatialData(testdata_path): diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 93d3fd4c..dd4cbf93 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -8,6 +8,7 @@ import numpy as np import nibabel as nb +from nitransforms.resampling import apply from nitransforms.base import TransformError from nitransforms.io.base import TransformFileError from nitransforms.nonlinear import ( @@ -247,8 +248,8 @@ def test_bspline(tmp_path, testdata_path): bsplxfm = BSplineFieldTransform(bs_name, reference=img_name) dispxfm = DenseFieldTransform(disp_name) - out_disp = dispxfm.apply(img_name) - out_bspl = bsplxfm.apply(img_name) + out_disp = apply(dispxfm,img_name) + out_bspl = apply(bsplxfm,img_name) out_disp.to_filename("resampled_field.nii.gz") out_bspl.to_filename("resampled_bsplines.nii.gz") From 9f93a67177504ad25fd96ef69c7c0af2133dffee Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Fri, 26 Apr 2024 10:35:37 +0200 Subject: [PATCH 040/123] enh: removed straneous comments, update nonlinear --- nitransforms/nonlinear.py | 2 +- nitransforms/tests/test_base.py | 2 -- 2 files changed, 1 insertion(+), 
3 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 52d854e2..488d01c8 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -168,7 +168,7 @@ def map(self, x, inverse=False): indexes = np.round(ijk).astype("int") if np.all(np.abs(ijk - indexes) < 1e-3): - indexes = tuple(tuple(i) for i in indexes.T) + indexes = tuple(tuple(i) for i in indexes) return self._field[indexes] return np.vstack(tuple( diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index fe08d2e9..4a345262 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -42,9 +42,7 @@ def test_ImageGrid(get_testdata, image_orientation): # Test ras2vox and vox2ras conversions ijk = [[10, 10, 10], [40, 4, 20], [0, 0, 0], [s - 1 for s in im.shape[:3]]] xyz = [img._affine.dot(idx + [1])[:-1] for idx in ijk] - # xyz = np.array([np.tensordot(img._affine, idx + [1], axes=1)[:-1] for idx in ijk]) - # import pdb; pdb.set_trace() assert np.allclose(np.squeeze(img.ras(ijk[0])), xyz[0]) assert np.allclose(np.round(img.index(xyz[0])), ijk[0]) assert np.allclose(img.ras(ijk).T, xyz) From f59720d2f2dae6e166c9e8416b3080aa6864ff28 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Thu, 2 May 2024 14:42:23 +0200 Subject: [PATCH 041/123] FIX: Offsource Apply Apply function offsourced. Tests: 139 passed, 163 Skipped, 15 Warnings --- nitransforms/nonlinear.py | 67 +++++++--------------------- nitransforms/resampling.py | 13 ++++-- nitransforms/tests/test_nonlinear.py | 13 +++--- 3 files changed, 31 insertions(+), 62 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 488d01c8..9b67b815 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -30,6 +30,11 @@ class DenseFieldTransform(TransformBase): __slots__ = ("_field", "_deltas") + @property + def ndim(self): + """Access the dimensions of this Desne Field Transform.""" + return self._field.ndim - 1 + def __init__(self, field=None, is_deltas=True, reference=None): """ Create a dense field transform. 
@@ -82,11 +87,10 @@ def __init__(self, field=None, is_deltas=True, reference=None): "Reference is not a spatial image" ) - ndim = self._field.ndim - 1 - if self._field.shape[-1] != ndim: + if self._field.shape[-1] != self.ndim: raise TransformError( "The number of components of the field (%d) does not match " - "the number of dimensions (%d)" % (self._field.shape[-1], ndim) + "the number of dimensions (%d)" % (self._field.shape[-1], self.ndim) ) if is_deltas: @@ -245,6 +249,12 @@ class BSplineFieldTransform(TransformBase): __slots__ = ['_coeffs', '_knots', '_weights', '_order', '_moving'] + @property + def ndim(self): + """Access the dimensions of this BSpline.""" + #return ndim = self._coeffs.shape[-1] + return self._coeffs.ndim - 1 + def __init__(self, coefficients, reference=None, order=3): """Create a smooth deformation field using B-Spline basis.""" super().__init__() @@ -277,14 +287,12 @@ def to_field(self, reference=None, dtype="float32"): if _ref is None: raise TransformError("A reference must be defined") - ndim = self._coeffs.shape[-1] - if self._weights is None: self._weights = grid_bspline_weights(_ref, self._knots) - field = np.zeros((_ref.npoints, ndim)) + field = np.zeros((_ref.npoints, self.ndim)) - for d in range(ndim): + for d in range(self.ndim): # 1 x Nvox : (1 x K) @ (K x Nvox) field[:, d] = self._coeffs[..., d].reshape(-1) @ self._weights @@ -292,51 +300,6 @@ def to_field(self, reference=None, dtype="float32"): field.astype(dtype).reshape(*_ref.shape, -1), reference=_ref ) - def apply( - self, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - ): - """Apply a B-Spline transform on input data.""" - - _ref = ( - self.reference if reference is None else - SpatialReference.factory(_ensure_image(reference)) - ) - spatialimage = _ensure_image(spatialimage) - - # If locations to be interpolated are not on a grid, run map() - #import pdb; pdb.set_trace() - if not isinstance(_ref, ImageGrid): - return apply( - super(), - spatialimage, - reference=_ref, - output_dtype=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - - ) - - # If locations to be interpolated are on a grid, generate a displacements field - return apply( - self.to_field(reference=reference), - spatialimage, - reference=reference, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - output_dtype=output_dtype, - ) - def map(self, x, inverse=False): r""" Apply the transformation to a list of physical coordinate points. 
diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 7cbdd9b8..942ab07c 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -87,16 +87,21 @@ def apply( spatialimage = _nbload(str(spatialimage)) data = np.asanyarray(spatialimage.dataobj) - - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) - ) if data.ndim == 4 and data.shape[-1] != len(transform): raise ValueError("The fourth dimension of the data does not match the tranform's shape.") if data.ndim < transform.ndim: data = data[..., np.newaxis] + + if hasattr(transform, 'to_field') and callable(transform.to_field): + targets = ImageGrid(spatialimage).index( + _as_homogeneous(transform.to_field(reference=reference).map(_ref.ndcoords.T), dim=_ref.ndim) + ) + else: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + ) if transform.ndim == 4: targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index dd4cbf93..4a802b54 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -97,13 +97,14 @@ def test_bsplines_references(testdata_path): ).to_field() with pytest.raises(TransformError): - BSplineFieldTransform( - testdata_path / "someones_bspline_coefficients.nii.gz" - ).apply(testdata_path / "someones_anatomy.nii.gz") + apply( + BSplineFieldTransform(testdata_path / "someones_bspline_coefficients.nii.gz"), + testdata_path / "someones_anatomy.nii.gz", + ) - BSplineFieldTransform( - testdata_path / "someones_bspline_coefficients.nii.gz" - ).apply( + apply( + BSplineFieldTransform( + testdata_path / "someones_bspline_coefficients.nii.gz"), testdata_path / "someones_anatomy.nii.gz", reference=testdata_path / "someones_anatomy.nii.gz" ) From 0837e912b710e107f7a7b27970b9ec5b142a1c2d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 08:05:19 -0400 Subject: [PATCH 042/123] sty: pacify flake8 --- nitransforms/base.py | 6 ++- nitransforms/linear.py | 6 +-- nitransforms/nonlinear.py | 57 ++++++++++++++-------------- nitransforms/resampling.py | 23 +++++++---- nitransforms/tests/test_base.py | 17 +++++---- nitransforms/tests/test_nonlinear.py | 49 +++++++++++++----------- 6 files changed, 85 insertions(+), 73 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 6a6ae7ed..26c0d475 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -177,7 +177,10 @@ def __ne__(self, other): class TransformBase: """Abstract image class to represent transforms.""" - __slots__ = ("_reference", "_ndim",) + __slots__ = ( + "_reference", + "_ndim", + ) def __init__(self, reference=None): """Instantiate a transform.""" @@ -283,6 +286,7 @@ def _as_homogeneous(xyz, dtype="float32", dim=3): return np.hstack((xyz, np.ones((xyz.shape[0], 1), dtype=dtype))) + def _apply_affine(x, affine, dim): """Get the image array's indexes corresponding to coordinates.""" return np.tensordot( diff --git a/nitransforms/linear.py b/nitransforms/linear.py index cf16104e..71df6a16 100644 --- a/nitransforms/linear.py +++ b/nitransforms/linear.py @@ -10,16 +10,12 @@ import warnings import numpy as np from pathlib import Path -from scipy import ndimage as ndi -from nibabel.loadsave import load as _nbload from nibabel.affines import from_matvec -from nibabel.arrayproxy import get_obj_dtype from 
nitransforms.base import ( ImageGrid, TransformBase, - SpatialReference, _as_homogeneous, EQUALITY_TOL, ) @@ -112,7 +108,7 @@ def __invert__(self): """ return self.__class__(self._inverse) - + def __len__(self): """Enable using len().""" return 1 if self._matrix.ndim == 2 else len(self._matrix) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 9b67b815..93f891f5 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -14,12 +14,10 @@ from nitransforms import io from nitransforms.io.base import _ensure_image from nitransforms.interp.bspline import grid_bspline_weights, _cubic_bspline -from nitransforms.resampling import apply from nitransforms.base import ( TransformBase, TransformError, ImageGrid, - SpatialReference, _as_homogeneous, ) from scipy.ndimage import map_coordinates @@ -77,14 +75,12 @@ def __init__(self, field=None, is_deltas=True, reference=None): is_deltas = True try: - self.reference = ImageGrid( - reference if reference is not None else field - ) + self.reference = ImageGrid(reference if reference is not None else field) except AttributeError: raise TransformError( "Field must be a spatial image if reference is not provided" - if reference is None else - "Reference is not a spatial image" + if reference is None + else "Reference is not a spatial image" ) if self._field.shape[-1] != self.ndim: @@ -175,16 +171,19 @@ def map(self, x, inverse=False): indexes = tuple(tuple(i) for i in indexes) return self._field[indexes] - return np.vstack(tuple( - map_coordinates( - self._field[..., i], - ijk.T, - order=3, - mode="constant", - cval=0, - prefilter=True, - ) for i in range(self.reference.ndim) - )).T + return np.vstack( + tuple( + map_coordinates( + self._field[..., i], + ijk.T, + order=3, + mode="constant", + cval=0, + prefilter=True, + ) + for i in range(self.reference.ndim) + ) + ).T def __matmul__(self, b): """ @@ -206,9 +205,9 @@ def __matmul__(self, b): True """ - retval = b.map( - self._field.reshape((-1, self._field.shape[-1])) - ).reshape(self._field.shape) + retval = b.map(self._field.reshape((-1, self._field.shape[-1]))).reshape( + self._field.shape + ) return DenseFieldTransform(retval, is_deltas=False, reference=self.reference) def __eq__(self, other): @@ -247,12 +246,12 @@ def from_filename(cls, filename, fmt="X5"): class BSplineFieldTransform(TransformBase): """Represent a nonlinear transform parameterized by BSpline basis.""" - __slots__ = ['_coeffs', '_knots', '_weights', '_order', '_moving'] + __slots__ = ["_coeffs", "_knots", "_weights", "_order", "_moving"] @property def ndim(self): """Access the dimensions of this BSpline.""" - #return ndim = self._coeffs.shape[-1] + # return ndim = self._coeffs.shape[-1] return self._coeffs.ndim - 1 def __init__(self, coefficients, reference=None, order=3): @@ -270,8 +269,9 @@ def __init__(self, coefficients, reference=None, order=3): if coefficients.shape[-1] != self.reference.ndim: raise TransformError( - 'Number of components of the coefficients does ' - 'not match the number of dimensions') + "Number of components of the coefficients does " + "not match the number of dimensions" + ) @property def ndim(self): @@ -281,8 +281,7 @@ def ndim(self): def to_field(self, reference=None, dtype="float32"): """Generate a displacements deformation field from this B-Spline field.""" _ref = ( - self.reference if reference is None else - ImageGrid(_ensure_image(reference)) + self.reference if reference is None else ImageGrid(_ensure_image(reference)) ) if _ref is None: raise TransformError("A 
reference must be defined") @@ -350,9 +349,9 @@ def _map_xyz(x, reference, knots, coeffs): # Probably this will change if the order of the B-Spline is different w_start, w_end = np.ceil(ijk - 2).astype(int), np.floor(ijk + 2).astype(int) # Generate a grid of indexes corresponding to the window - nonzero_knots = tuple([ - np.arange(start, end + 1) for start, end in zip(w_start, w_end) - ]) + nonzero_knots = tuple( + [np.arange(start, end + 1) for start, end in zip(w_start, w_end)] + ) nonzero_knots = tuple(np.meshgrid(*nonzero_knots, indexing="ij")) window = np.array(nonzero_knots).reshape((ndim, -1)) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 942ab07c..e1ac1542 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -77,7 +77,9 @@ def apply( reference = _nbload(str(reference)) _ref = ( - transform.reference if reference is None else SpatialReference.factory(reference) + transform.reference + if reference is None + else SpatialReference.factory(reference) ) if _ref is None: @@ -89,20 +91,25 @@ def apply( data = np.asanyarray(spatialimage.dataobj) if data.ndim == 4 and data.shape[-1] != len(transform): - raise ValueError("The fourth dimension of the data does not match the tranform's shape.") + raise ValueError( + "The fourth dimension of the data does not match the tranform's shape." + ) if data.ndim < transform.ndim: data = data[..., np.newaxis] - - if hasattr(transform, 'to_field') and callable(transform.to_field): + + if hasattr(transform, "to_field") and callable(transform.to_field): targets = ImageGrid(spatialimage).index( - _as_homogeneous(transform.to_field(reference=reference).map(_ref.ndcoords.T), dim=_ref.ndim) + _as_homogeneous( + transform.to_field(reference=reference).map(_ref.ndcoords.T), + dim=_ref.ndim, + ) ) else: targets = ImageGrid(spatialimage).index( # data should be an image _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) ) - + if transform.ndim == 4: targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T @@ -115,14 +122,14 @@ def apply( cval=cval, prefilter=prefilter, ) - + if isinstance(_ref, ImageGrid): # If reference is grid, reshape hdr = None if _ref.header is not None: hdr = _ref.header.copy() hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) moved = spatialimage.__class__( - resampled.reshape(_ref.shape if data.ndim < 4 else _ref.shape + (-1, )), + resampled.reshape(_ref.shape if data.ndim < 4 else _ref.shape + (-1,)), _ref.affine, hdr, ) diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 4a345262..d32ce7f9 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -4,7 +4,12 @@ import pytest import h5py -from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase, _as_homogeneous +from ..base import ( + SpatialReference, + SampledSpatialData, + ImageGrid, + TransformBase, +) from .. 
import linear as nitl from ..resampling import apply @@ -104,15 +109,11 @@ def _to_hdf5(klass, x5_root): xfm = nitl.Affine() xfm.reference = fname moved = apply(xfm, fname, order=0) - assert np.all( - imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype()) - ) + assert np.all(imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype())) # Test ndim returned by affine assert nitl.Affine().ndim == 3 - assert nitl.LinearTransformsMapping( - [nitl.Affine(), nitl.Affine()] - ).ndim == 4 + assert nitl.LinearTransformsMapping([nitl.Affine(), nitl.Affine()]).ndim == 4 # Test applying to Gifti gii = nb.gifti.GiftiImage( @@ -127,7 +128,7 @@ def _to_hdf5(klass, x5_root): assert np.allclose(giimoved.reshape(xfm.reference.shape), moved.get_fdata()) # Test to_filename - xfm.to_filename("data.xfm", fmt='itk') + xfm.to_filename("data.xfm", fmt="itk") def test_SampledSpatialData(testdata_path): diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 4a802b54..cfaa12c2 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -29,7 +29,7 @@ 3dNwarpApply -nwarp {transform} -source {moving} \ -master {reference} -interp NN -prefix {output} {extra}\ """.format, - 'fsl': """\ + "fsl": """\ applywarp -i {moving} -r {reference} -o {output} {extra}\ -w {transform} --interp=nn""".format, } @@ -39,7 +39,9 @@ def test_itk_disp_load(size): """Checks field sizes.""" with pytest.raises(TransformFileError): - ITKDisplacementsField.from_image(nb.Nifti1Image(np.zeros(size), np.eye(4), None)) + ITKDisplacementsField.from_image( + nb.Nifti1Image(np.zeros(size), np.eye(4), None) + ) @pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 2, 3), (20, 20, 20, 1, 4)]) @@ -98,15 +100,16 @@ def test_bsplines_references(testdata_path): with pytest.raises(TransformError): apply( - BSplineFieldTransform(testdata_path / "someones_bspline_coefficients.nii.gz"), + BSplineFieldTransform( + testdata_path / "someones_bspline_coefficients.nii.gz" + ), testdata_path / "someones_anatomy.nii.gz", ) apply( - BSplineFieldTransform( - testdata_path / "someones_bspline_coefficients.nii.gz"), + BSplineFieldTransform(testdata_path / "someones_bspline_coefficients.nii.gz"), testdata_path / "someones_anatomy.nii.gz", - reference=testdata_path / "someones_anatomy.nii.gz" + reference=testdata_path / "someones_anatomy.nii.gz", ) @@ -170,7 +173,7 @@ def test_displacements_field1( nt_moved_mask.set_data_dtype(msk.get_data_dtype()) diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL + assert np.sqrt((diff**2).mean()) < RMSE_TOL brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) # Then apply the transform and cross-check with software @@ -179,7 +182,7 @@ def test_displacements_field1( reference=tmp_path / "reference.nii.gz", moving=tmp_path / "reference.nii.gz", output=tmp_path / "resampled.nii.gz", - extra="--output-data-type uchar" if sw_tool == "itk" else "" + extra="--output-data-type uchar" if sw_tool == "itk" else "", ) exit_code = check_call([cmd], shell=True) @@ -190,10 +193,9 @@ def test_displacements_field1( nt_moved.set_data_dtype(nii.get_data_dtype()) nt_moved.to_filename("nt_resampled.nii.gz") sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) + diff = np.asanyarray( + sw_moved.dataobj, 
dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) # A certain tolerance is necessary because of resampling at borders assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL @@ -230,12 +232,11 @@ def test_displacements_field2(tmp_path, testdata_path, sw_tool): nt_moved = xfm.apply(img_fname, order=0) nt_moved.to_filename("nt_resampled.nii.gz") sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL + assert np.sqrt((diff**2).mean()) < RMSE_TOL def test_bspline(tmp_path, testdata_path): @@ -249,12 +250,16 @@ def test_bspline(tmp_path, testdata_path): bsplxfm = BSplineFieldTransform(bs_name, reference=img_name) dispxfm = DenseFieldTransform(disp_name) - out_disp = apply(dispxfm,img_name) - out_bspl = apply(bsplxfm,img_name) + out_disp = apply(dispxfm, img_name) + out_bspl = apply(bsplxfm, img_name) out_disp.to_filename("resampled_field.nii.gz") out_bspl.to_filename("resampled_bsplines.nii.gz") - assert np.sqrt( - (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32")) ** 2 - ).mean() < 0.2 + assert ( + np.sqrt( + (out_disp.get_fdata(dtype="float32") - out_bspl.get_fdata(dtype="float32")) + ** 2 + ).mean() + < 0.2 + ) From c7a958cf50f11110ca463288b631d1df9d200d77 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 08:18:54 -0400 Subject: [PATCH 043/123] fix: remove double definition --- nitransforms/nonlinear.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 93f891f5..db296331 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -28,11 +28,6 @@ class DenseFieldTransform(TransformBase): __slots__ = ("_field", "_deltas") - @property - def ndim(self): - """Access the dimensions of this Desne Field Transform.""" - return self._field.ndim - 1 - def __init__(self, field=None, is_deltas=True, reference=None): """ Create a dense field transform. 
@@ -248,12 +243,6 @@ class BSplineFieldTransform(TransformBase): __slots__ = ["_coeffs", "_knots", "_weights", "_order", "_moving"] - @property - def ndim(self): - """Access the dimensions of this BSpline.""" - # return ndim = self._coeffs.shape[-1] - return self._coeffs.ndim - 1 - def __init__(self, coefficients, reference=None, order=3): """Create a smooth deformation field using B-Spline basis.""" super().__init__() From 667344cba4b5d057de716ada8a9472b6ac035fc2 Mon Sep 17 00:00:00 2001 From: jbanusco Date: Fri, 1 Dec 2023 16:25:31 +0100 Subject: [PATCH 044/123] BF: Composition of deformation fields --- nitransforms/nonlinear.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 79c3aa45..c338171e 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -170,17 +170,22 @@ def map(self, x, inverse=False): indexes = tuple(tuple(i) for i in indexes.T) return self._field[indexes] - return np.vstack(tuple( + + new_map = np.vstack(tuple( map_coordinates( self._field[..., i], ijk.T, order=3, mode="constant", - cval=0, + cval=np.nan, prefilter=True, ) for i in range(self.reference.ndim) )).T + # Set NaN values back to the original coordinates value = no displacement + new_map[np.isnan(new_map)] = x[np.isnan(new_map)] + return new_map + def __matmul__(self, b): """ Compose with a transform on the right. From 43b1140471e19be7f47e8077a32f16c6cca05cf5 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 09:39:25 -0400 Subject: [PATCH 045/123] sty: pacify flake8 --- nitransforms/nonlinear.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index c338171e..69c19d35 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -170,7 +170,6 @@ def map(self, x, inverse=False): indexes = tuple(tuple(i) for i in indexes.T) return self._field[indexes] - new_map = np.vstack(tuple( map_coordinates( self._field[..., i], From bfe592d59b666bea9d9ae9564bae81cb5a5e4fa5 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 16 May 2024 14:38:53 -0400 Subject: [PATCH 046/123] Fix bad merge --- nitransforms/nonlinear.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 0aa6d36b..f4b95142 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -166,7 +166,7 @@ def map(self, x, inverse=False): indexes = tuple(tuple(i) for i in indexes) return self._field[indexes] - return np.vstack( + new_map = np.vstack( tuple( map_coordinates( self._field[..., i], From 2ca863058a1276e3f6f1ec744be7e7087e0f4d3c Mon Sep 17 00:00:00 2001 From: Feilong Ma Date: Thu, 16 May 2024 15:41:53 -0400 Subject: [PATCH 047/123] Normalize the transform so that the sum or the value of each element is comparable with input data. See https://feilong.github.io/tpl-onavg/how_to_use/space_travel.html --- nitransforms/surface.py | 30 ++++++++++++++++++++++++++---- nitransforms/tests/test_surface.py | 13 +++++++++++++ 2 files changed, 39 insertions(+), 4 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index a0c436af..37deb20d 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -34,7 +34,7 @@ def __init__(self, mat): else: self.mat = sparse.csr_array(mat) - def apply(self, x, inverse=False): + def apply(self, x, inverse=False, normalize="element"): """Apply the transform to surface data. 
Parameters @@ -44,15 +44,37 @@ def apply(self, x, inverse=False): inverse : bool, default=False Whether to apply the inverse transform. If True, ``x`` has shape (..., nv2), and the output will have shape (..., nv1). + normalize : {"element", "sum", "none"}, default="element" + Normalization strategy. If "element", the scale of each value in + the output is comparable to each value of the input. If "sum", the + sum of the output is comparable to the sum of the input. If + "none", no normalization is applied. Returns ------- y : array-like, shape (..., nv2) Transformed data. """ - if inverse: - return x @ self.mat.T - return x @ self.mat + if normalize not in ("element", "sum", "none"): + raise ValueError("Invalid normalization strategy.") + + mat = self.mat.T if inverse else self.mat + + if normalize == "element": + sum_ = mat.sum(axis=0) + scale = np.zeros_like(sum_) + mask = sum_ != 0 + scale[mask] = 1.0 / sum_[mask] + mat = mat @ sparse.diags(scale) + elif normalize == "sum": + sum_ = mat.sum(axis=1) + scale = np.zeros_like(sum_) + mask = sum_ != 0 + scale[mask] = 1.0 / sum_[mask] + mat = sparse.diags(scale) @ mat + + y = x @ mat + return y def _to_hdf5(self, x5_root): """Write transform to HDF5 file.""" diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 418f9d7f..76233d1d 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -43,3 +43,16 @@ def test_surface_transform_npz(): os.remove(fn) raise os.remove(fn) + + +def test_surface_transform_normalization(): + mat = np.random.uniform(size=(20, 10)) + xfm = SurfaceTransform(mat) + x = np.random.uniform(size=(5, 20)) + y_element = xfm.apply(x, normalize="element") + np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1)) + y_sum = xfm.apply(x, normalize="sum") + np.testing.assert_allclose(y_sum.sum(axis=1), x.sum(axis=1)) + y_none = xfm.apply(x, normalize="none") + assert y_none.sum() != y_element.sum() + assert y_none.sum() != y_sum.sum() From e1165a8d648a72cc7fe722519a99241479112c3b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 15:54:46 -0400 Subject: [PATCH 048/123] fix: the original locations should be an ndarray --- nitransforms/nonlinear.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 69c19d35..e3a984b1 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -163,6 +163,8 @@ def map(self, x, inverse=False): if inverse is True: raise NotImplementedError + + x = np.array(x) ijk = self.reference.index(x) indexes = np.round(ijk).astype("int") From bcb4220697d3f3f6361e6027cf711f9592307717 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 16:11:32 -0400 Subject: [PATCH 049/123] fix: update circleci machine image --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9f216d61..1a067007 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,7 +2,7 @@ version: 2 jobs: build_pytest: machine: - image: ubuntu-2004:202107-02 + image: default working_directory: /tmp/src/nitransforms environment: TZ: "/usr/share/zoneinfo/America/Los_Angeles" From 7cef99c5e18f99155d875687c54183243ae2e1d8 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 16:11:44 -0400 Subject: [PATCH 050/123] fix: update python version in dockerfile --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 
ae270b45..e578d3ed 100644 --- a/Dockerfile +++ b/Dockerfile @@ -108,7 +108,7 @@ ENV PATH="/usr/local/miniconda/bin:$PATH" \ # Installing precomputed python packages RUN conda install -y -c anaconda -c conda-forge \ - python=3.7 \ + python=3.8 \ libxml2=2.9 \ libxslt=1.1 \ lxml \ From 3dbcf18ec0dd9c3c27d655574fce946a6c2b0f03 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 17 May 2024 09:45:09 -0400 Subject: [PATCH 051/123] NF: add surface index transforms --- nitransforms/__init__.py | 3 +- nitransforms/base.py | 27 +++++++++ nitransforms/surface.py | 90 +++++++++++++++++++++++++++++- nitransforms/tests/test_surface.py | 12 ++-- 4 files changed, 123 insertions(+), 9 deletions(-) diff --git a/nitransforms/__init__.py b/nitransforms/__init__.py index 1f819933..38768ae9 100644 --- a/nitransforms/__init__.py +++ b/nitransforms/__init__.py @@ -16,7 +16,7 @@ transform """ -from . import linear, manip, nonlinear +from . import linear, manip, nonlinear, surface from .linear import Affine, LinearTransformsMapping from .nonlinear import DenseFieldTransform from .manip import TransformChain @@ -37,6 +37,7 @@ __copyright__ = "Copyright (c) 2021 The NiPy developers" __all__ = [ + "surface", "linear", "manip", "nonlinear", diff --git a/nitransforms/base.py b/nitransforms/base.py index 96f00edb..8d80b25f 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -88,6 +88,33 @@ def shape(self): """Access the space's size of each dimension.""" return self._shape +class SurfaceMesh(SampledSpatialData): + """Class to represent surface meshes.""" + + __slots__ = ["_triangles"] + def __init__(self, dataset): + """Create a sampling reference.""" + self._shape = None + + if isinstance(dataset, (str, Path)): + dataset = _nbload(str(dataset)) + + if hasattr(dataset, "numDA"): # Looks like a Gifti file + _das = dataset.get_arrays_from_intent(INTENT_CODES["pointset"]) + if not _das: + raise TypeError( + "Input Gifti file does not contain reference coordinates." 
+ ) + self._coords = np.vstack([da.data for da in _das]) + _tris = dataset.get_arrays_from_intent(INTENT_CODES["triangle"]) + self._triangles = np.vstack([da.data for da in _tris]) + self._npoints, self._ndim = self._coords.shape + return + + if isinstance(dataset, Cifti2Image): + raise NotImplementedError + + raise ValueError("Dataset could not be interpreted as an irregular sample.") class ImageGrid(SampledSpatialData): """Class to represent spaces of gridded data (images).""" diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 37deb20d..2cc3913b 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -11,11 +11,97 @@ import h5py import numpy as np import scipy.sparse as sparse - +from nitransforms.base import ( + SurfaceMesh +) +import nibabel as nb +from scipy.spatial import KDTree from nitransforms.base import TransformBase -class SurfaceTransform(TransformBase): +class SurfaceTransformBase(): + """Generic surface transformation class""" + __slots__ = ("_reference", "_moving") + def __init__(self, reference, moving): + """Instantiate a generic surface transform.""" + self._reference = reference + self._moving = moving + + def __eq__(self, other): + ref_coords_eq = (self.reference._coordinates == other.reference._coordinates).all() + ref_tris_eq = (self.reference._triangles == other.reference._triangles).all() + mov_coords_eq = (self.moving._coordinates == other.moving._coordinates).all() + mov_tris_eq = (self.moving._triangles == other.moving._triangles).all() + return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq + + def __invert__(self): + return self.__class__(self.moving, self.reference) + @property + def reference(self): + return self._reference + + @reference.setter + def reference(self, surface): + self._reference = SurfaceMesh(surface) + + @property + def moving(self): + return self._moving + + @moving.setter + def moving(self, surface): + self._moving = SurfaceMesh(surface) + @classmethod + def from_filename(cls, reference_path, moving_path): + """Create an Surface Index Transformation from a pair of surfaces with corresponding vertices.""" + reference = SurfaceMesh(nb.load(reference_path)) + moving = SurfaceMesh(nb.load(moving_path)) + return cls(reference, moving) + +class SurfaceIndexTransform(SurfaceTransformBase): + """Represents surface transformations in which the indices correspond and the coordinates differ.""" + + __slots__ = ("_reference", "_moving") + def __init__(self, reference, moving): + """Instantiate a transform between two surfaces with corresponding vertices.""" + super().__init__(reference=reference, moving=moving) + if (self._reference._triangles != self._moving._triangles).all(): + raise ValueError("Both surfaces for an index transform must have corresponding vertices.") + + def map(self, x, inverse=False): + if inverse: + source = self.reference + dest = self.moving + else: + source = self.moving + dest = self.reference + + s_tree = KDTree(source._coords) + dists, matches = s_tree.query(x) + if not np.allclose(dists, 0): + raise NotImplementedError("Mapping on surfaces not implemented for coordinates that aren't vertices") + return dest._coords[matches] + + def __add__(self, other): + return self.__class__(self.reference, other.moving) + + @property + def reference(self): + return self._reference + + @reference.setter + def reference(self, surface): + self._reference = SurfaceMesh(surface) + + @property + def moving(self): + return self._moving + + @moving.setter + def moving(self, surface): + self._moving = 
SurfaceMesh(surface) + +class SurfaceCoordinateTransform(SurfaceTransformBase): """Represents transforms between surface spaces.""" __slots__ = ("mat",) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 76233d1d..e90fe5ea 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -4,17 +4,17 @@ import numpy as np import scipy.sparse as sparse -from nitransforms.surface import SurfaceTransform +from nitransforms.surface import SurfaceCoordinateTransform def test_surface_transform_x5(): mat = sparse.random(10, 10, density=0.5) - xfm = SurfaceTransform(mat) + xfm = SurfaceCoordinateTransform(mat) fn = tempfile.mktemp(suffix=".h5") print(fn) xfm.to_filename(fn) - xfm2 = SurfaceTransform.from_filename(fn) + xfm2 = SurfaceCoordinateTransform.from_filename(fn) try: assert xfm.mat.shape == xfm2.mat.shape np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) @@ -28,12 +28,12 @@ def test_surface_transform_x5(): def test_surface_transform_npz(): mat = sparse.random(10, 10, density=0.5) - xfm = SurfaceTransform(mat) + xfm = SurfaceCoordinateTransform(mat) fn = tempfile.mktemp(suffix=".npz") print(fn) xfm.to_filename(fn) - xfm2 = SurfaceTransform.from_filename(fn) + xfm2 = SurfaceCoordinateTransform.from_filename(fn) try: assert xfm.mat.shape == xfm2.mat.shape np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) @@ -47,7 +47,7 @@ def test_surface_transform_npz(): def test_surface_transform_normalization(): mat = np.random.uniform(size=(20, 10)) - xfm = SurfaceTransform(mat) + xfm = SurfaceCoordinateTransform(mat) x = np.random.uniform(size=(5, 20)) y_element = xfm.apply(x, normalize="element") np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1)) From 5b1736bf2bbda7c737e8c7a7ec8806dd7510a1ad Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 11:45:54 -0400 Subject: [PATCH 052/123] Update nitransforms/resampling.py --- nitransforms/resampling.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index e1ac1542..9de0d2d6 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -98,6 +98,7 @@ def apply( if data.ndim < transform.ndim: data = data[..., np.newaxis] + # For model-based nonlinear transforms, generate the corresponding dense field if hasattr(transform, "to_field") and callable(transform.to_field): targets = ImageGrid(spatialimage).index( _as_homogeneous( From 4f59910eb272220c2d6c2d8548bf7c62cc5b8a3b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 16 May 2024 16:16:08 -0400 Subject: [PATCH 053/123] fix: clean up environment cache --- .circleci/config.yml | 42 +- Dockerfile | 282 +++++--- docker/files/freesurfer7.3.2-exclude.txt | 868 +++++++++++++++++++++++ docker/files/neurodebian.gpg | 71 -- env.yml | 42 ++ 5 files changed, 1096 insertions(+), 209 deletions(-) create mode 100644 docker/files/freesurfer7.3.2-exclude.txt delete mode 100644 docker/files/neurodebian.gpg create mode 100644 env.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index 1a067007..8d0d6748 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -12,9 +12,9 @@ jobs: - checkout - restore_cache: keys: - - env-v3-{{ .Branch }}- - - env-v3-master- - - env-v3- + - env-v6-{{ .Branch }}- + - env-v6-master- + - env-v6- - run: name: Setup git-annex command: | @@ -29,17 +29,14 @@ jobs: - run: name: Setup DataLad command: | - export PY3=$(pyenv versions | grep '3\.' 
| - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 - python -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 3.4" - python -m pip install --no-cache-dir -U datalad datalad-osf + python3 -m pip install --no-cache-dir -U pip "setuptools >= 45.0" "setuptools_scm[toml] >= 6.2" + python3 -m pip install --no-cache-dir -U datalad datalad-osf - save_cache: - key: env-v3-{{ .Branch }}-{{ .BuildNum }} + key: env-v6-{{ .Branch }}-{{ .BuildNum }} paths: - /opt/circleci/git-annex.linux - - /opt/circleci/.pyenv/versions/3.9.4 + - /opt/circleci/.pyenv/versions - restore_cache: keys: @@ -49,10 +46,7 @@ jobs: - run: name: Install test data from GIN command: | - export PY3=$(pyenv versions | grep '3\.' | - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 - export PATH=/opt/circleci/git-annex.linux:$PATH + export PATH=/opt/circleci/.pyenv/versions/3.12.2/bin/:/opt/circleci/git-annex.linux:$PATH mkdir -p /tmp/data cd /tmp/data datalad install -r https://gin.g-node.org/oesteban/nitransforms-tests @@ -98,15 +92,12 @@ jobs: name: Build Docker image & push to registry no_output_timeout: 60m command: | - export PY3=$(pyenv versions | grep '3\.' | - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 e=1 && for i in {1..5}; do docker build --rm --cache-from=nitransforms:latest \ -t nitransforms:latest \ --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ --build-arg VCS_REF=`git rev-parse --short HEAD` \ - --build-arg VERSION=$( python3 setup.py --version ) . \ + --build-arg VERSION=$( python3 -m setuptools_scm ) . \ && e=0 && break || sleep 15 done && [ "$e" -eq "0" ] docker tag nitransforms:latest localhost:5000/nitransforms @@ -123,10 +114,7 @@ jobs: - run: name: Check version packaged in Docker image command: | - export PY3=$(pyenv versions | grep '3\.' 
| - sed -e 's/.* 3\./3./' -e 's/ .*//') - pyenv local $PY3 - THISVERSION=${CIRCLE_TAG:-$(python3 setup.py --version)} + THISVERSION=${CIRCLE_TAG:-$(python3 -m setuptools_scm)} INSTALLED_VERSION=$(\ docker run -it --rm --entrypoint=python nitransforms \ -c 'import nitransforms as nit; print(nit.__version__, end="")' ) @@ -186,7 +174,7 @@ jobs: command: | python3 -m venv /tmp/buildenv source /tmp/buildenv/bin/activate - python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 3.4" \ + python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 6.2" \ "pip>=10.0.1" twine docutils python setup.py sdist bdist_wheel twine check dist/nitransforms* @@ -200,9 +188,9 @@ jobs: command: | python3 -m venv /tmp/install_sdist source /tmp/install_sdist/bin/activate - python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" + python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2" - THISVERSION=$( python3 setup.py --version ) + THISVERSION=$( python3 -m setuptools_scm ) THISVERSION=${CIRCLE_TAG:-$THISVERSION} python3 -m pip install dist/nitransforms*.tar.gz INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")') @@ -214,9 +202,9 @@ jobs: command: | python3 -m venv /tmp/install_wheel source /tmp/install_wheel/bin/activate - python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" + python3 -m pip install "setuptools >= 45.0" "pip>=10.0.1" "setuptools_scm[toml] >= 6.2" - THISVERSION=$( python3 setup.py --version ) + THISVERSION=$( python3 -m setuptools_scm ) THISVERSION=${CIRCLE_TAG:-$THISVERSION} python3 -m pip install dist/nitransforms*.whl INSTALLED_VERSION=$(python3 -c 'import nitransforms as nit; print(nit.__version__, end="")') diff --git a/Dockerfile b/Dockerfile index e578d3ed..a804bf11 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,48 +1,144 @@ -FROM ubuntu:xenial-20200114 - -# Pre-cache neurodebian key -COPY docker/files/neurodebian.gpg /usr/local/etc/neurodebian.gpg - -# Prepare environment +# Ubuntu 22.04 LTS - Jammy +ARG BASE_IMAGE=ubuntu:jammy-20240125 + +# +# Build wheel +# +FROM python:slim AS src +RUN pip install build +RUN apt-get update && \ + apt-get install -y --no-install-recommends git +COPY . 
/src +RUN python -m build /src + +# +# Download stages +# + +# Utilities for downloading packages +FROM ${BASE_IMAGE} as downloader +# Bump the date to current to refresh curl/certificates/etc +RUN echo "2023.07.20" RUN apt-get update && \ apt-get install -y --no-install-recommends \ - curl \ + binutils \ bzip2 \ ca-certificates \ - xvfb \ - build-essential \ - autoconf \ - libtool \ - pkg-config \ - git && \ - curl -sL https://deb.nodesource.com/setup_10.x | bash - && \ + curl \ + unzip && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# FreeSurfer 7.3.2 +FROM downloader as freesurfer +COPY docker/files/freesurfer7.3.2-exclude.txt /usr/local/etc/freesurfer7.3.2-exclude.txt +RUN curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-ubuntu22_amd64-7.3.2.tar.gz \ + | tar zxv --no-same-owner -C /opt --exclude-from=/usr/local/etc/freesurfer7.3.2-exclude.txt + +# AFNI +FROM downloader as afni +# Bump the date to current to update AFNI +RUN echo "2023.07.20" +RUN mkdir -p /opt/afni-latest \ + && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \ + | tar -xz -C /opt/afni-latest --strip-components 1 \ + --exclude "linux_openmp_64/*.gz" \ + --exclude "linux_openmp_64/funstuff" \ + --exclude "linux_openmp_64/shiny" \ + --exclude "linux_openmp_64/afnipy" \ + --exclude "linux_openmp_64/lib/RetroTS" \ + --exclude "linux_openmp_64/lib_RetroTS" \ + --exclude "linux_openmp_64/meica.libs" \ + # Keep only what we use + && find /opt/afni-latest -type f -not \( \ + -name "3dTshift" -or \ + -name "3dUnifize" -or \ + -name "3dAutomask" -or \ + -name "3dvolreg" \) -delete + +# Micromamba +FROM downloader as micromamba + +# Install a C compiler to build extensions when needed. +# traits<6.4 wheels are not available for Python 3.11+, but build easily. 
+RUN apt-get update && \ + apt-get install -y --no-install-recommends build-essential && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +WORKDIR / +# Bump the date to current to force update micromamba +RUN echo "2024.02.06" +RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + +ENV MAMBA_ROOT_PREFIX="/opt/conda" +COPY env.yml /tmp/env.yml +# COPY requirements.txt /tmp/requirements.txt +WORKDIR /tmp +RUN micromamba create -y -f /tmp/env.yml && \ + micromamba clean -y -a + +# +# Main stage +# +FROM ${BASE_IMAGE} as nitransforms + +# Configure apt +ENV DEBIAN_FRONTEND="noninteractive" \ + LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" + +# Some baseline tools; bc is needed for FreeSurfer, so don't drop it +RUN apt-get update && \ apt-get install -y --no-install-recommends \ - nodejs && \ + bc \ + ca-certificates \ + curl \ + git \ + gnupg \ + lsb-release \ + netbase \ + xvfb && \ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Installing freesurfer -RUN curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz | tar zxv --no-same-owner -C /opt \ - --exclude='freesurfer/diffusion' \ - --exclude='freesurfer/docs' \ - --exclude='freesurfer/fsfast' \ - --exclude='freesurfer/lib/cuda' \ - --exclude='freesurfer/lib/qt' \ - --exclude='freesurfer/matlab' \ - --exclude='freesurfer/mni/share/man' \ - --exclude='freesurfer/subjects/fsaverage_sym' \ - --exclude='freesurfer/subjects/fsaverage3' \ - --exclude='freesurfer/subjects/fsaverage4' \ - --exclude='freesurfer/subjects/cvs_avg35' \ - --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ - --exclude='freesurfer/subjects/bert' \ - --exclude='freesurfer/subjects/lh.EC_average' \ - --exclude='freesurfer/subjects/rh.EC_average' \ - --exclude='freesurfer/subjects/sample-*.mgz' \ - --exclude='freesurfer/subjects/V1_average' \ - --exclude='freesurfer/trctrain' - -ENV FSL_DIR="/usr/share/fsl/5.0" \ - OS="Linux" \ +# Configure PPAs for libpng12 and libxp6 +RUN GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/linuxuprising.gpg --recv 0xEA8CACC073C3DB2A \ + && GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/zeehio.gpg --recv 0xA1301338A3A48C4A \ + && echo "deb [signed-by=/usr/share/keyrings/linuxuprising.gpg] https://ppa.launchpadcontent.net/linuxuprising/libpng12/ubuntu jammy main" > /etc/apt/sources.list.d/linuxuprising.list \ + && echo "deb [signed-by=/usr/share/keyrings/zeehio.gpg] https://ppa.launchpadcontent.net/zeehio/libxp/ubuntu jammy main" > /etc/apt/sources.list.d/zeehio.list + +# Dependencies for AFNI; requires a discontinued multiarch-support package from bionic (18.04) +RUN apt-get update -qq \ + && apt-get install -y -q --no-install-recommends \ + ed \ + gsl-bin \ + libglib2.0-0 \ + libglu1-mesa-dev \ + libglw1-mesa \ + libgomp1 \ + libjpeg62 \ + libpng12-0 \ + libxm4 \ + libxp6 \ + netpbm \ + tcsh \ + xfonts-base \ + xvfb \ + && curl -sSL --retry 5 -o /tmp/multiarch.deb http://archive.ubuntu.com/ubuntu/pool/main/g/glibc/multiarch-support_2.27-3ubuntu1.5_amd64.deb \ + && dpkg -i /tmp/multiarch.deb \ + && rm /tmp/multiarch.deb \ + && apt-get install -f \ + && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ + && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \ + && if [ -n "$gsl2_path" ]; then \ + ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \ 
+ fi \ + && ldconfig + +# Install files from stages +COPY --from=freesurfer /opt/freesurfer /opt/freesurfer +COPY --from=afni /opt/afni-latest /opt/afni-latest + +# Simulate SetUpFreeSurfer.sh +ENV OS="Linux" \ FS_OVERRIDE=0 \ FIX_VERTEX_AREA="" \ FSF_OUTPUT_FORMAT="nii.gz" \ @@ -56,95 +152,59 @@ ENV SUBJECTS_DIR="$FREESURFER_HOME/subjects" \ MNI_DATAPATH="$FREESURFER_HOME/mni/data" ENV PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \ MNI_PERL5LIB="$MINC_LIB_DIR/perl5/5.8.5" \ - PATH="$FREESURFER_HOME/bin:$FSFAST_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH" + PATH="$FREESURFER_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH" -# Installing Neurodebian packages (FSL, AFNI, git) -RUN curl -sSL "http://neuro.debian.net/lists/$( lsb_release -c | cut -f2 ).us-ca.full" >> /etc/apt/sources.list.d/neurodebian.sources.list && \ - apt-key add /usr/local/etc/neurodebian.gpg && \ - (apt-key adv --refresh-keys --keyserver hkp://ha.pool.sks-keyservers.net 0xA5D32F012649A5A9 || true) +# AFNI config +ENV PATH="/opt/afni-latest:$PATH" \ + AFNI_IMSAVE_WARNINGS="NO" \ + AFNI_PLUGINPATH="/opt/afni-latest" -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - fsl-core=5.0.9-5~nd16.04+1 \ - fsl-mni152-templates=5.0.7-2 \ - afni=16.2.07~dfsg.1-5~nd16.04+1 \ - convert3d \ - connectome-workbench=1.3.2-2~nd16.04+1 \ - git-annex-standalone && \ - apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +# Workbench config +ENV PATH="/opt/workbench/bin_linux64:$PATH" + +# Create a shared $HOME directory +RUN useradd -m -s /bin/bash -G users neuro +WORKDIR /home/neuro +ENV HOME="/home/neuro" \ + LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH" + +COPY --from=micromamba /bin/micromamba /bin/micromamba +COPY --from=micromamba /opt/conda/envs/nitransforms /opt/conda/envs/nitransforms -ENV FSLDIR="/usr/share/fsl/5.0" \ +ENV MAMBA_ROOT_PREFIX="/opt/conda" +RUN micromamba shell init -s bash && \ + echo "micromamba activate nitransforms" >> $HOME/.bashrc +ENV PATH="/opt/conda/envs/nitransforms/bin:$PATH" \ + CPATH="/opt/conda/envs/nitransforms/include:$CPATH" \ + LD_LIBRARY_PATH="/opt/conda/envs/nitransforms/lib:$LD_LIBRARY_PATH" + +# FSL environment +ENV LANG="C.UTF-8" \ + LC_ALL="C.UTF-8" \ + PYTHONNOUSERSITE=1 \ + FSLDIR="/opt/conda/envs/nitransforms" \ FSLOUTPUTTYPE="NIFTI_GZ" \ FSLMULTIFILEQUIT="TRUE" \ - POSSUMDIR="/usr/share/fsl/5.0" \ - LD_LIBRARY_PATH="/usr/lib/fsl/5.0:$LD_LIBRARY_PATH" \ - FSLTCLSH="/usr/bin/tclsh" \ - FSLWISH="/usr/bin/wish" \ - AFNI_MODELPATH="/usr/lib/afni/models" \ - AFNI_IMSAVE_WARNINGS="NO" \ - AFNI_TTATLAS_DATASET="/usr/share/afni/atlases" \ - AFNI_PLUGINPATH="/usr/lib/afni/plugins" -ENV PATH="/usr/lib/fsl/5.0:/usr/lib/afni/bin:$PATH" - -# Installing ANTs 2.3.3 (NeuroDocker build) -# Note: the URL says 2.3.4 but it is actually 2.3.3 -ENV ANTSPATH=/usr/lib/ants -RUN mkdir -p $ANTSPATH && \ - curl -sSL "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" \ - | tar -xzC $ANTSPATH --strip-components 1 -ENV PATH=$ANTSPATH:$PATH - -# Installing and setting up miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda3-4.5.11-Linux-x86_64.sh && \ - bash Miniconda3-4.5.11-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda3-4.5.11-Linux-x86_64.sh - -# Set CPATH for packages relying on compiled libs (e.g. 
indexed_gzip) -ENV PATH="/usr/local/miniconda/bin:$PATH" \ - CPATH="/usr/local/miniconda/include/:$CPATH" \ - LANG="C.UTF-8" \ - LC_ALL="C.UTF-8" \ - PYTHONNOUSERSITE=1 - -# Installing precomputed python packages -RUN conda install -y -c anaconda -c conda-forge \ - python=3.8 \ - libxml2=2.9 \ - libxslt=1.1 \ - lxml \ - mkl \ - mkl-service \ - numpy=1.20 \ - pip=21 \ - scipy=1.6 \ - setuptools \ - setuptools_scm \ - toml \ - zlib; sync && \ - chmod -R a+rX /usr/local/miniconda; sync && \ - chmod +x /usr/local/miniconda/bin/*; sync && \ - conda build purge-all; sync && \ - conda clean -tipsy && sync + FSLLOCKDIR="" \ + FSLMACHINELIST="" \ + FSLREMOTECALL="" \ + FSLGECUDAQ="cuda.q" # Unless otherwise specified each process should only use one thread - nipype # will handle parallelization ENV MKL_NUM_THREADS=1 \ OMP_NUM_THREADS=1 -# Create a shared $HOME directory -RUN useradd -m -s /bin/bash -G users neuro -WORKDIR /home/neuro -ENV HOME="/home/neuro" - # Install package # CRITICAL: Make sure python setup.py --version has been run at least once # outside the container, with access to the git history. -COPY . /src/nitransforms -RUN pip install --no-cache-dir "/src/nitransforms[all]" +COPY --from=src /src/dist/*.whl . +RUN python -m pip install --no-cache-dir $( ls /src/dist/*.whl )[all] -RUN find $HOME -type d -exec chmod go=u {} + && \ - find $HOME -type f -exec chmod go=u {} + +RUN find $HOME -type d -exec chmod go=u {} + && \ + find $HOME -type f -exec chmod go=u {} + && \ + rm -rf $HOME/.npm $HOME/.conda $HOME/.empty RUN ldconfig WORKDIR /tmp/ diff --git a/docker/files/freesurfer7.3.2-exclude.txt b/docker/files/freesurfer7.3.2-exclude.txt new file mode 100644 index 00000000..3b07a64e --- /dev/null +++ b/docker/files/freesurfer7.3.2-exclude.txt @@ -0,0 +1,868 @@ +freesurfer/average/711-2B_as_mni_average_305.4dfp.hdr +freesurfer/average/711-2B_as_mni_average_305.4dfp.ifh +freesurfer/average/711-2B_as_mni_average_305.4dfp.img +freesurfer/average/711-2B_as_mni_average_305.4dfp.img.rec +freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.hdr +freesurfer/average/711-2B_as_mni_average_305_mask.4dfp.img.rec +freesurfer/average/711-2C_as_mni_average_305.4dfp.hdr +freesurfer/average/711-2C_as_mni_average_305.4dfp.img.rec +freesurfer/average/711-2C_as_mni_average_305.4dfp.mat +freesurfer/average/aseg+spmhead+vermis+pons.ixi.gca +freesurfer/average/BrainstemSS +freesurfer/average/Buckner_JNeurophysiol11_MNI152 +freesurfer/average/Choi_JNeurophysiol12_MNI152 +freesurfer/average/colortable_desikan_killiany.txt +freesurfer/average/face.gca +freesurfer/average/HippoSF +freesurfer/average/label_scales.dat +freesurfer/average/lh.atlas2002_simple.gcs +freesurfer/average/lh.atlas2005_simple.gcs +freesurfer/average/lh.average.curvature.filled.buckner40.tif +freesurfer/average/lh.average.CURVATURE.tif +freesurfer/average/lh.average.tif +freesurfer/average/lh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs +freesurfer/average/lh.destrieux.simple.2009-07-29.gcs +freesurfer/average/lh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs +freesurfer/average/lh.DKTatlas100.gcs +freesurfer/average/lh.DKTatlas40.gcs +freesurfer/average/lh_trans_toSulc.gcs +freesurfer/average/mideface-atlas +freesurfer/average/mni152.mni305.cor.subfov1.dat +freesurfer/average/mni152.mni305.cor.subfov2.dat +freesurfer/average/mni152.register.dat +freesurfer/average/mni305.cor.readme +freesurfer/average/mni305.cor.subfov1.mgz +freesurfer/average/mni305.cor.subfov1.reg +freesurfer/average/mni305.cor.subfov2.mgz 
+freesurfer/average/mni305.cor.subfov2.reg +freesurfer/average/mni305.mask.cor.mgz +freesurfer/average/mni_average_305.4dfp.hdr +freesurfer/average/mni_average_305.4dfp.ifh +freesurfer/average/mni_average_305.4dfp.img +freesurfer/average/mni_average_305.4dfp.img.rec +freesurfer/average/mult-comp-cor +freesurfer/average/pons.mni152.2mm.mgz +freesurfer/average/RB_all_2008-03-26.mni152.2mm.lta +freesurfer/average/RB_all_2016-05-10.vc700.gca +freesurfer/average/RB_all_2019_10_25.talxfm.mni305.gca +freesurfer/average/RB_all_withskull_2016-05-10.vc700.gca +freesurfer/average/RB_all_withskull_2019_10_22.talxfm.mni305.gca +freesurfer/average/rh.atlas2002_simple.gcs +freesurfer/average/rh.atlas2005_simple.gcs +freesurfer/average/rh.average.curvature.filled.buckner40.tif +freesurfer/average/rh.average.CURVATURE.tif +freesurfer/average/rh.average.tif +freesurfer/average/rh.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs +freesurfer/average/rh.destrieux.simple.2009-07-29.gcs +freesurfer/average/rh.DKTaparc.atlas.acfb40.noaparc.i12.2020-05-13.gcs +freesurfer/average/rh.DKTatlas100.gcs +freesurfer/average/rh.DKTatlas40.gcs +freesurfer/average/rh_trans_toSulc.gcs +freesurfer/average/RLB700_atlas_as_orig.4dfp.hdr +freesurfer/average/RLB700_atlas_as_orig.4dfp.ifh +freesurfer/average/RLB700_atlas_as_orig.4dfp.img +freesurfer/average/RLB700_atlas_as_orig.4dfp.img.rec +freesurfer/average/samseg +freesurfer/average/surf +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Comp_N24_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Control_N12_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Adult_Patient_N12_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.hdr +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.ifh +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img +freesurfer/average/SVIP_Child_Comp_N24_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.hdr +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.ifh +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img +freesurfer/average/SVIP_Full_Comp_N48_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Young_Control_N12_as_orig.4dfp.img.rec +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.hdr +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.ifh +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img +freesurfer/average/SVIP_Young_Patient_N12_as_orig.4dfp.img.rec +freesurfer/average/talairach_mixed_with_skull.gca +freesurfer/average/ThalamicNuclei +freesurfer/average/tissue_parms.txt +freesurfer/average/wmsa_new_eesmith.gca +freesurfer/average/Yeo_Brainmap_MNI152 +freesurfer/average/Yeo_JNeurophysiol11_MNI152 +freesurfer/bin/3dvolreg.afni +freesurfer/bin/4dfptoanalyze +freesurfer/bin/anatomiCutsUtils +freesurfer/bin/annot2std +freesurfer/bin/aparc2feat +freesurfer/bin/aparcstats2table 
+freesurfer/bin/aparc_stats_aseg +freesurfer/bin/aparcstatsdiff +freesurfer/bin/apas2aseg +freesurfer/bin/applyMorph +freesurfer/bin/aseg2feat +freesurfer/bin/asegstats2table +freesurfer/bin/asegstatsdiff +freesurfer/bin/bblabel +freesurfer/bin/bbmask +freesurfer/bin/bedpostx_mgh +freesurfer/bin/beta2sxa +freesurfer/bin/bet.fsl +freesurfer/bin/biasfield +freesurfer/bin/bmedits2surf +freesurfer/bin/brec +freesurfer/bin/brec.awk +freesurfer/bin/browse-minc-header.tcl +freesurfer/bin/bugr +freesurfer/bin/build_desikan_killiany_gcs.csh +freesurfer/bin/cblumwmgyri +freesurfer/bin/checkMCR.sh +freesurfer/bin/check_recons.sh +freesurfer/bin/check_siemens_dir +freesurfer/bin/check_subject +freesurfer/bin/clear_fs_env.csh +freesurfer/bin/compute_interrater_variability.csh +freesurfer/bin/compute_label_vals.csh +freesurfer/bin/compute_label_volumes.csh +freesurfer/bin/connectgraph +freesurfer/bin/cor_to_minc +freesurfer/bin/cp-dicom +freesurfer/bin/createMorph +freesurfer/bin/csvprint +freesurfer/bin/dcmdir-info-mgh +freesurfer/bin/dcmdjpeg.fs +freesurfer/bin/dcmdrle.fs +freesurfer/bin/dcmsplit +freesurfer/bin/dcmunpack +freesurfer/bin/deface_subject +freesurfer/bin/defect-seg +freesurfer/bin/dicom-rename +freesurfer/bin/diffusionUtils +freesurfer/bin/dmri_ac.sh +freesurfer/bin/dmri_AnatomiCuts +freesurfer/bin/dmri_bset +freesurfer/bin/dmri_coloredFA +freesurfer/bin/dmri_extractSurfaceMeasurements +freesurfer/bin/dmri_forrest +freesurfer/bin/dmri_group +freesurfer/bin/dmri_groupByEndpoints +freesurfer/bin/dmri_match +freesurfer/bin/dmri_mergepaths +freesurfer/bin/dmri_motion +freesurfer/bin/dmri_neighboringRegions +freesurfer/bin/dmri_paths +freesurfer/bin/dmri_pathstats +freesurfer/bin/dmri_projectEndPoints +freesurfer/bin/dmrirc.example +freesurfer/bin/dmrirc.long.example +freesurfer/bin/dmrirc.long.multiscan.example +freesurfer/bin/dmrirc.multiscan.example +freesurfer/bin/dmri_saveHistograms +freesurfer/bin/dmri_spline +freesurfer/bin/dmri_stats_ac +freesurfer/bin/dmri_tensoreig +freesurfer/bin/dmri_train +freesurfer/bin/dmri_trk2trk +freesurfer/bin/dmri_violinPlots +freesurfer/bin/dmri_vox2vox +freesurfer/bin/dt_recon +freesurfer/bin/epidewarp.fsl +freesurfer/bin/exportGcam +freesurfer/bin/feat2segstats +freesurfer/bin/feat2surf +freesurfer/bin/findsession +freesurfer/bin/fix_subject +freesurfer/bin/fix_subject_corrected +freesurfer/bin/fix_subject_corrected-lh +freesurfer/bin/fix_subject_corrected-rh +freesurfer/bin/fix_subject-lh +freesurfer/bin/fix_subject_on_seychelles +freesurfer/bin/fix_subject-rh +freesurfer/bin/fixup_mni_paths +freesurfer/bin/flip_4dfp +freesurfer/bin/flirt.fsl +freesurfer/bin/flirt.newdefault.20080811.sch +freesurfer/bin/fname2ext +freesurfer/bin/fname2stem +freesurfer/bin/freesurfer +freesurfer/bin/freeview +freesurfer/bin/fscalc +freesurfer/bin/fscalc.fsl +freesurfer/bin/fsdcmdecompress +freesurfer/bin/fsfget +freesurfer/bin/fsfirst.fsl +freesurfer/bin/fs_install_mcr +freesurfer/bin/fsl.5.0.2.xyztrans.sch +freesurfer/bin/fs_lib_check +freesurfer/bin/fsl_label2voxel +freesurfer/bin/fslmaths.fsl +freesurfer/bin/fslorient.fsl +freesurfer/bin/fslregister +freesurfer/bin/fsl_rigid_register +freesurfer/bin/fsl_sub_mgh +freesurfer/bin/fslswapdim.fsl +freesurfer/bin/fspalm +freesurfer/bin/fsPrintHelp +freesurfer/bin/fsr-coreg +freesurfer/bin/fsr-import +freesurfer/bin/fs_run_from_mcr +freesurfer/bin/fs_spmreg.glnxa64 +freesurfer/bin/fs_tutorial_data +freesurfer/bin/fs_update +freesurfer/bin/fsvglrun +freesurfer/bin/fvcompare +freesurfer/bin/gca-apply +freesurfer/bin/gcainit 
+freesurfer/bin/gcaprepone +freesurfer/bin/gcatrain +freesurfer/bin/gcatrainskull +freesurfer/bin/gdcmconv.fs +freesurfer/bin/gems_compute_binary_atlas_probs +freesurfer/bin/get_label_thickness +freesurfer/bin/groupstats +freesurfer/bin/groupstatsdiff +freesurfer/bin/gtmseg +freesurfer/bin/help_xml_validate +freesurfer/bin/hiam_make_surfaces +freesurfer/bin/hiam_make_template +freesurfer/bin/hiam_register +freesurfer/bin/histo_compute_joint_density +freesurfer/bin/histo_fix_topology +freesurfer/bin/histo_register_block +freesurfer/bin/histo_segment +freesurfer/bin/histo_synthesize +freesurfer/bin/ico_supersample +freesurfer/bin/id.xfm +freesurfer/bin/inflate_subject +freesurfer/bin/inflate_subject3 +freesurfer/bin/inflate_subject-lh +freesurfer/bin/inflate_subject_new +freesurfer/bin/inflate_subject_new-lh +freesurfer/bin/inflate_subject_new-rh +freesurfer/bin/inflate_subject-rh +freesurfer/bin/inflate_subject_sc +freesurfer/bin/irepifitvol +freesurfer/bin/irepifitvol.glnx64 +freesurfer/bin/isanalyze +freesurfer/bin/isnifti +freesurfer/bin/isolate_labels.csh +freesurfer/bin/isolate_labels_keeporigval.csh +freesurfer/bin/is-surface +freesurfer/bin/jkgcatrain +freesurfer/bin/label2flat +freesurfer/bin/label2patch +freesurfer/bin/label_area +freesurfer/bin/label_border +freesurfer/bin/label_child +freesurfer/bin/label_elderly_subject +freesurfer/bin/labels_disjoint +freesurfer/bin/labels_intersect +freesurfer/bin/label_subject +freesurfer/bin/label_subject_flash +freesurfer/bin/label_subject_mixed +freesurfer/bin/labels_union +freesurfer/bin/list_otl_labels +freesurfer/bin/listsubj +freesurfer/bin/long_create_base_sigma +freesurfer/bin/long_create_orig +freesurfer/bin/longmc +freesurfer/bin/long_mris_slopes +freesurfer/bin/long_qdec_table +freesurfer/bin/long_stats_combine +freesurfer/bin/long_stats_slopes +freesurfer/bin/long_stats_tps +freesurfer/bin/long_submit_jobs +freesurfer/bin/long_submit_postproc +freesurfer/bin/lpcregister +freesurfer/bin/lta_diff +freesurfer/bin/make_average_subcort +freesurfer/bin/make_average_subject +freesurfer/bin/make_average_surface +freesurfer/bin/make_average_volume +freesurfer/bin/make_cortex_label +freesurfer/bin/make_exvivo_filled +freesurfer/bin/make_folding_atlas +freesurfer/bin/make_hemi_mask +freesurfer/bin/make-segvol-table +freesurfer/bin/make_symmetric +freesurfer/bin/make_upright +freesurfer/bin/makevol +freesurfer/bin/map_all_labels +freesurfer/bin/map_all_labels-lh +freesurfer/bin/map_central_sulcus +freesurfer/bin/map_to_base +freesurfer/bin/meanval +freesurfer/bin/mergeseg +freesurfer/bin/merge_stats_tables +freesurfer/bin/minc2seqinfo +freesurfer/bin/mkheadsurf +freesurfer/bin/mkima_index.tcl +freesurfer/bin/mkmnc_index.tcl +freesurfer/bin/mksubjdirs +freesurfer/bin/mksurfatlas +freesurfer/bin/mkxsubjreg +freesurfer/bin/mni152reg +freesurfer/bin/morph_only_subject +freesurfer/bin/morph_only_subject-lh +freesurfer/bin/morph_only_subject-rh +freesurfer/bin/morph_rgb-lh +freesurfer/bin/morph_rgb-rh +freesurfer/bin/morph_subject +freesurfer/bin/morph_subject-lh +freesurfer/bin/morph_subject_on_seychelles +freesurfer/bin/morph_subject-rh +freesurfer/bin/morph_tables-lh +freesurfer/bin/morph_tables-rh +freesurfer/bin/mri_align_long.csh +freesurfer/bin/mri_aparc2wmseg +freesurfer/bin/mri_apply_autoencoder +freesurfer/bin/mri_apply_bias +freesurfer/bin/mri_apply_inu_correction +freesurfer/bin/mri_aseg_edit_reclassify +freesurfer/bin/mri_aseg_edit_train +freesurfer/bin/mri_auto_fill +freesurfer/bin/mri_average +freesurfer/bin/mri_bc_sc_bias_correct 
+freesurfer/bin/mri_brain_volume +freesurfer/bin/mri_build_priors +freesurfer/bin/mri_cal_renormalize_gca +freesurfer/bin/mri_ca_tissue_parms +freesurfer/bin/mri_ca_train +freesurfer/bin/mri_cht2p +freesurfer/bin/mri_classify +freesurfer/bin/mri_cnr +freesurfer/bin/mri_compute_bias +freesurfer/bin/mri_compute_change_map +freesurfer/bin/mri_compute_distances +freesurfer/bin/mri_compute_layer_fractions +freesurfer/bin/mri_compute_structure_transforms +freesurfer/bin/mri_compute_volume_fractions +freesurfer/bin/mri_compute_volume_intensities +freesurfer/bin/mri_concatenate_gcam +freesurfer/bin/mri_convert_mdh +freesurfer/bin/mri_copy_params +freesurfer/bin/mri_copy_values +freesurfer/bin/mri_cor2label +freesurfer/bin/mri_correct_segmentations +freesurfer/bin/mri_create_t2combined +freesurfer/bin/mri_create_tests +freesurfer/bin/mri_cvs_check +freesurfer/bin/mri_cvs_data_copy +freesurfer/bin/mri_cvs_register +freesurfer/bin/mri_cvs_requiredfiles.txt +freesurfer/bin/mri_dct_align +freesurfer/bin/mri_dct_align_binary +freesurfer/bin/mri_distance_transform +freesurfer/bin/mri_dist_surf_label +freesurfer/bin/mri_divide_segmentation +freesurfer/bin/mri_edit_segmentation +freesurfer/bin/mri_edit_segmentation_with_surfaces +freesurfer/bin/mri_elastic_energy +freesurfer/bin/mri_estimate_tissue_parms +freesurfer/bin/mri_evaluate_morph +freesurfer/bin/mri_extract +freesurfer/bin/mri_extract_conditions +freesurfer/bin/mri_extract_fcd_features +freesurfer/bin/mri_extract_label +freesurfer/bin/mri_extract_largest_CC +freesurfer/bin/mri_fcili +freesurfer/bin/mri_fdr +freesurfer/bin/mri_fieldsign +freesurfer/bin/mri_fit_bias +freesurfer/bin/mri_fslmat_to_lta +freesurfer/bin/mri-func2sph +freesurfer/bin/mri-funcvits +freesurfer/bin/mri_fuse_intensity_images +freesurfer/bin/mri_gca_ambiguous +freesurfer/bin/mri_gcab_train +freesurfer/bin/mri_gdfglm +freesurfer/bin/mri_glmfit +freesurfer/bin/mri_glmfit-sim +freesurfer/bin/mri_gradient_info +freesurfer/bin/mri_gradunwarp +freesurfer/bin/mri_gtmpvc +freesurfer/bin/mri_gtmseg +freesurfer/bin/mri_hausdorff_dist +freesurfer/bin/mri_head +freesurfer/bin/mri_hires_register +freesurfer/bin/mri_histo_eq +freesurfer/bin/mri_histo_normalize +freesurfer/bin/mri_ibmc +freesurfer/bin/mri_interpolate +freesurfer/bin/mri_jacobian +freesurfer/bin/mri_joint_density +freesurfer/bin/mri_label_accuracy +freesurfer/bin/mri_label_histo +freesurfer/bin/mri_label_vals +freesurfer/bin/mri_label_volume +freesurfer/bin/mri_linear_align +freesurfer/bin/mri_linear_align_binary +freesurfer/bin/mri_linear_register +freesurfer/bin/mri_long_normalize +freesurfer/bin/mri_make_bem_surfaces +freesurfer/bin/mri_make_density_map +freesurfer/bin/mri_make_labels +freesurfer/bin/mri_make_register +freesurfer/bin/mri_make_template +freesurfer/bin/mri_map_cpdat +freesurfer/bin/mri_maps2csd +freesurfer/bin/mri_mark_temporal_lobe +freesurfer/bin/mri_mc +freesurfer/bin/mri_mcsim +freesurfer/bin/mri_mergelabels +freesurfer/bin/mri_mi +freesurfer/bin/mri_modify +freesurfer/bin/mri_morphology +freesurfer/bin/mri_mosaic +freesurfer/bin/mri_motion_correct +freesurfer/bin/mri_motion_correct2 +freesurfer/bin/mri_ms_EM +freesurfer/bin/mri_ms_EM_with_atlas +freesurfer/bin/mri_ms_fitparms +freesurfer/bin/mri_ms_LDA +freesurfer/bin/mri_multiscale_segment +freesurfer/bin/mri_multispectral_segment +freesurfer/bin/mri_nl_align +freesurfer/bin/mri_nl_align_binary +freesurfer/bin/mri_nlfilter +freesurfer/bin/mri_paint +freesurfer/bin/mri_parselabel +freesurfer/bin/mri_parse_sdcmdir +freesurfer/bin/mri_partial_ribbon 
+freesurfer/bin/mri_path2label +freesurfer/bin/mri_polv +freesurfer/bin/mri_probedicom +freesurfer/bin/mri_probe_ima +freesurfer/bin/mri_reduce +freesurfer/bin/mri_refine_seg +freesurfer/bin/mri_register +freesurfer/bin/mri_reorient_LR.csh +freesurfer/bin/mri_rf_label +freesurfer/bin/mri_rf_long_label +freesurfer/bin/mri_rf_long_train +freesurfer/bin/mri_rf_train +freesurfer/bin/mri_ribbon +freesurfer/bin/mri_rigid_register +freesurfer/bin/mris2rgb +freesurfer/bin/mris_AA_shrinkwrap +freesurfer/bin/mris_add_template +freesurfer/bin/mris_annot_diff +freesurfer/bin/mris_annot_to_segmentation +freesurfer/bin/mris_aseg_distance +freesurfer/bin/mris_average_curvature +freesurfer/bin/mris_average_parcellation +freesurfer/bin/mris_BA_segment +freesurfer/bin/mri_sbbr +freesurfer/bin/mris_ca_deform +freesurfer/bin/mris_ca_train +freesurfer/bin/mris_classify_thickness +freesurfer/bin/mris_compute_acorr +freesurfer/bin/mris_compute_layer_intensities +freesurfer/bin/mris_compute_lgi +freesurfer/bin/mris_compute_optimal_kernel +freesurfer/bin/mris_compute_overlap +freesurfer/bin/mris_compute_parc_overlap +freesurfer/bin/mris_compute_volume_fractions +freesurfer/bin/mris_congeal +freesurfer/bin/mris_copy_header +freesurfer/bin/mris_curvature2image +freesurfer/bin/mris_deform +freesurfer/bin/mris_density +freesurfer/bin/mris_distance_map +freesurfer/bin/mris_distance_to_label +freesurfer/bin/mris_distance_transform +freesurfer/bin/mri_segcentroids +freesurfer/bin/mri_seghead +freesurfer/bin/mri_segment_hypothalamic_subunits +freesurfer/bin/mri_segment_tumor +freesurfer/bin/mri_segment_wm_damage +freesurfer/bin/mri_seg_overlap +freesurfer/bin/mris_entropy +freesurfer/bin/mris_errors +freesurfer/bin/mris_extract_patches +freesurfer/bin/mris_extract_values +freesurfer/bin/mris_exvivo_surfaces +freesurfer/bin/mris_fbirn_annot +freesurfer/bin/mris_fill +freesurfer/bin/mris_find_flat_regions +freesurfer/bin/mris_flatten +freesurfer/bin/mris_fwhm +freesurfer/bin/mris_gradient +freesurfer/bin/mris_hausdorff_dist +freesurfer/bin/mris_image2vtk +freesurfer/bin/mri_simulate_atrophy +freesurfer/bin/mris_info +freesurfer/bin/mris_init_global_tractography +freesurfer/bin/mris_intensity_profile +freesurfer/bin/mris_interpolate_warp +freesurfer/bin/mris_label_area +freesurfer/bin/mris_label_calc +freesurfer/bin/mris_label_mode +freesurfer/bin/mris_longitudinal_surfaces +freesurfer/bin/mris_make_average_surface +freesurfer/bin/mris_make_face_parcellation +freesurfer/bin/mris_make_map_surfaces +freesurfer/bin/mris_make_surfaces +freesurfer/bin/mris_make_template +freesurfer/bin/mris_map_cuts +freesurfer/bin/mris_mef_surfaces +freesurfer/bin/mris_merge_parcellations +freesurfer/bin/mris_mesh_subdivide +freesurfer/bin/mris_morph_stats +freesurfer/bin/mris_ms_refine +freesurfer/bin/mris_ms_surface_CNR +freesurfer/bin/mris_multimodal +freesurfer/bin/mris_multimodal_surface_placement +freesurfer/bin/mris_multiscale_stats +freesurfer/bin/mris_niters2fwhm +freesurfer/bin/mris_nudge +freesurfer/bin/mris_parcellate_connectivity +freesurfer/bin/mri-sph2surf +freesurfer/bin/mris_pmake +freesurfer/bin/mris_preproc +freesurfer/bin/mris_profileClustering +freesurfer/bin/mrisp_write +freesurfer/bin/mris_refine_surfaces +freesurfer/bin/mris_register_label_map +freesurfer/bin/mris_register_to_label +freesurfer/bin/mris_register_to_volume +freesurfer/bin/mris_remove_negative_vertices +freesurfer/bin/mris_remove_variance +freesurfer/bin/mris_resample +freesurfer/bin/mris_rescale +freesurfer/bin/mris_reverse +freesurfer/bin/mris_rf_label 
+freesurfer/bin/mris_rf_train +freesurfer/bin/mris_rotate +freesurfer/bin/mris_sample_label +freesurfer/bin/mris_sample_parc +freesurfer/bin/mris_seg2annot +freesurfer/bin/mris_segment +freesurfer/bin/mris_segmentation_stats +freesurfer/bin/mris_segment_vals +freesurfer/bin/mris_shrinkwrap +freesurfer/bin/mris_simulate_atrophy +freesurfer/bin/mris_smooth_intracortical +freesurfer/bin/mris_surf2vtk +freesurfer/bin/mris_surface_change +freesurfer/bin/mris_surface_to_vol_distances +freesurfer/bin/mris_svm_classify +freesurfer/bin/mris_svm_train +freesurfer/bin/mris_talairach +freesurfer/bin/mris_thickness_comparison +freesurfer/bin/mris_transform +freesurfer/bin/mris_translate_annotation +freesurfer/bin/mris_transmantle_dysplasia_paths +freesurfer/bin/mri_strip_nonwhite +freesurfer/bin/mri_strip_subject_info +freesurfer/bin/mris_twoclass +freesurfer/bin/mri_surfacemask +freesurfer/bin/mris_volmask_novtk +freesurfer/bin/mris_volmask_vtk +freesurfer/bin/mris_volsmooth +freesurfer/bin/mris_volume +freesurfer/bin/mris_warp +freesurfer/bin/mris_wm_volume +freesurfer/bin/mris_w_to_curv +freesurfer/bin/mri_synthesize +freesurfer/bin/mri_synthstrip +freesurfer/bin/mri_threshold +freesurfer/bin/mri_topologycorrection +freesurfer/bin/mri_train +freesurfer/bin/mri_train_autoencoder +freesurfer/bin/mri_transform +freesurfer/bin/mri_transform_to_COR +freesurfer/bin/mri_twoclass +freesurfer/bin/mri_update_gca +freesurfer/bin/mri_validate_skull_stripped +freesurfer/bin/mri_vessel_segment +freesurfer/bin/mri_vol2label +freesurfer/bin/mri_vol2roi +freesurfer/bin/mri_volcluster +freesurfer/bin/mri_volsynth +freesurfer/bin/mri_warp_convert +freesurfer/bin/mri_wbc +freesurfer/bin/mri_wmfilter +freesurfer/bin/mri_xcorr +freesurfer/bin/mri_xvolavg +freesurfer/bin/mri_z2p +freesurfer/bin/ms_refine_subject +freesurfer/bin/nmovie_qt +freesurfer/bin/oct_register_mosaic +freesurfer/bin/oct_rf_train +freesurfer/bin/oct_train +freesurfer/bin/optseq2 +freesurfer/bin/orientLAS +freesurfer/bin/parc_atlas_jackknife_test +freesurfer/bin/plot_structure_stats.tcl +freesurfer/bin/polyorder +freesurfer/bin/predict_v1.sh +freesurfer/bin/print_unique_labels.csh +freesurfer/bin/progressbar.tcl +freesurfer/bin/qatools.py +freesurfer/bin/qdec +freesurfer/bin/qdec_glmfit +freesurfer/bin/qt.conf +freesurfer/bin/quantifyBrainstemStructures.sh +freesurfer/bin/quantifyHAsubregions.sh +freesurfer/bin/quantifyThalamicNuclei.sh +freesurfer/bin/rbbr +freesurfer/bin/rbftest +freesurfer/bin/rcbf-prep +freesurfer/bin/rebuild_gca_atlas.csh +freesurfer/bin/recon-all-exvivo +freesurfer/bin/recon-all.makefile +freesurfer/bin/regdat2xfm +freesurfer/bin/reg-feat2anat +freesurfer/bin/register_child +freesurfer/bin/register.csh +freesurfer/bin/register_elderly_subject +freesurfer/bin/register_subject +freesurfer/bin/register_subject_flash +freesurfer/bin/register_subject_mixed +freesurfer/bin/reg-mni305.2mm +freesurfer/bin/reinflate_subject +freesurfer/bin/reinflate_subject-lh +freesurfer/bin/reinflate_subject-rh +freesurfer/bin/remove_talairach +freesurfer/bin/renormalize_subject +freesurfer/bin/renormalize_subject_keep_editting +freesurfer/bin/renormalize_T1_subject +freesurfer/bin/repair_siemens_file +freesurfer/bin/reregister_subject_mixed +freesurfer/bin/rtview +freesurfer/bin/run_mris_preproc +freesurfer/bin/run-qdec-glm +freesurfer/bin/run_samseg_long +freesurfer/bin/run_SegmentSubfieldsT1Longitudinal.sh +freesurfer/bin/run_SegmentSubject.sh +freesurfer/bin/run_segmentSubjectT1_autoEstimateAlveusML.sh 
+freesurfer/bin/run_segmentSubjectT1T2_autoEstimateAlveusML.sh +freesurfer/bin/run_segmentSubjectT2_autoEstimateAlveusML.sh +freesurfer/bin/run_SegmentThalamicNuclei.sh +freesurfer/bin/samseg +freesurfer/bin/samseg2recon +freesurfer/bin/samseg-long +freesurfer/bin/sbtiv +freesurfer/bin/seg2filled +freesurfer/bin/segmentBS.sh +freesurfer/bin/segmentHA_T1_long.sh +freesurfer/bin/segmentHA_T1.sh +freesurfer/bin/segmentHA_T2.sh +freesurfer/bin/segment_monkey +freesurfer/bin/SegmentSubfieldsT1Longitudinal +freesurfer/bin/segment_subject +freesurfer/bin/segmentSubject +freesurfer/bin/segment_subject_notal +freesurfer/bin/segment_subject_notal2 +freesurfer/bin/segment_subject_old_skull_strip +freesurfer/bin/segment_subject_sc +freesurfer/bin/segmentSubjectT1_autoEstimateAlveusML +freesurfer/bin/segmentSubjectT1T2_autoEstimateAlveusML +freesurfer/bin/segmentSubjectT2_autoEstimateAlveusML +freesurfer/bin/segment_subject_talmgh +freesurfer/bin/SegmentThalamicNuclei +freesurfer/bin/segmentThalamicNuclei.sh +freesurfer/bin/segpons +freesurfer/bin/setlabelstat +freesurfer/bin/sfa2fieldsign +freesurfer/bin/show_tal +freesurfer/bin/skip_long_make_checks +freesurfer/bin/slicedelay +freesurfer/bin/slicetimer.fsl +freesurfer/bin/sphere_subject +freesurfer/bin/sphere_subject-lh +freesurfer/bin/sphere_subject-rh +freesurfer/bin/spherical_st +freesurfer/bin/Spline3_test +freesurfer/bin/spmmat2register +freesurfer/bin/spmregister +freesurfer/bin/spm_t_to_b +freesurfer/bin/sratio +freesurfer/bin/stat_normalize +freesurfer/bin/stattablediff +freesurfer/bin/stem2fname +freesurfer/bin/stim_polar +freesurfer/bin/streamlineFilter +freesurfer/bin/surf2vol +freesurfer/bin/surfreg +freesurfer/bin/swi_preprocess +freesurfer/bin/swi_process +freesurfer/bin/t4img_4dfp +freesurfer/bin/t4imgs_4dfp +freesurfer/bin/talairach2 +freesurfer/bin/talairach_mgh +freesurfer/bin/tal_compare +freesurfer/bin/tal_QC_AZS +freesurfer/bin/talsegprob +freesurfer/bin/template +freesurfer/bin/testOrientationPlanesFromParcellation +freesurfer/bin/test_recon-all.csh +freesurfer/bin/test_tutorials.sh +freesurfer/bin/thickdiffmap +freesurfer/bin/tkmedit +freesurfer/bin/tkmeditfv +freesurfer/bin/tkregister2 +freesurfer/bin/tkregisterfv +freesurfer/bin/tksurfer +freesurfer/bin/tksurferfv +freesurfer/bin/trac-all +freesurfer/bin/trac-paths +freesurfer/bin/trac-preproc +freesurfer/bin/tractstats2table +freesurfer/bin/train-gcs-atlas +freesurfer/bin/tridec +freesurfer/bin/trk_tools +freesurfer/bin/unpack_ima1.tcl +freesurfer/bin/unpackimadir +freesurfer/bin/unpackimadir2 +freesurfer/bin/unpack_ima.tcl +freesurfer/bin/unpackmincdir +freesurfer/bin/unpack_mnc.tcl +freesurfer/bin/unpacksdcmdir +freesurfer/bin/usbtree +freesurfer/bin/vol2segavg +freesurfer/bin/vol2subfield +freesurfer/bin/vol2symsurf +freesurfer/bin/vsm-smooth +freesurfer/bin/wfilemask +freesurfer/bin/wm-anat-snr +freesurfer/bin/wmedits2surf +freesurfer/bin/wmsaseg +freesurfer/bin/xcerebralseg +freesurfer/bin/xcorr +freesurfer/bin/xfmrot +freesurfer/bin/xhemireg +freesurfer/bin/xhemi-tal +freesurfer/bin/xsanatreg +freesurfer/bin/zero_lt_4dfp +freesurfer/DefectLUT.txt +freesurfer/diffusion +freesurfer/docs/xml +freesurfer/FreeSurferEnv.csh +freesurfer/FreeSurferEnv.sh +freesurfer/fsfast +freesurfer/lib/bem/ic0.tri +freesurfer/lib/bem/ic1.tri +freesurfer/lib/bem/ic2.tri +freesurfer/lib/bem/ic3.tri +freesurfer/lib/bem/ic6.tri +freesurfer/lib/bem/inner_skull.dat +freesurfer/lib/bem/outer_skin.dat +freesurfer/lib/bem/outer_skull.dat +freesurfer/lib/images +freesurfer/lib/qt 
+freesurfer/lib/resource +freesurfer/lib/tcl +freesurfer/lib/tktools +freesurfer/lib/vtk +freesurfer/matlab +freesurfer/mni-1.4 +freesurfer/mni/bin/correct_field +freesurfer/mni/bin/crispify +freesurfer/mni/bin/dcm2mnc +freesurfer/mni/bin/Display +freesurfer/mni/bin/ecattominc +freesurfer/mni/bin/evaluate_field +freesurfer/mni/bin/extracttag +freesurfer/mni/bin/field2imp +freesurfer/mni/bin/imp2field +freesurfer/mni/bin/invert_raw_image +freesurfer/mni/bin/make_model +freesurfer/mni/bin/make_phantom +freesurfer/mni/bin/make_template +freesurfer/mni/bin/mincaverage +freesurfer/mni/bin/mincbbox +freesurfer/mni/bin/minccalc +freesurfer/mni/bin/mincchamfer +freesurfer/mni/bin/mincconcat +freesurfer/mni/bin/minccopy +freesurfer/mni/bin/mincdiff +freesurfer/mni/bin/mincedit +freesurfer/mni/bin/mincexpand +freesurfer/mni/bin/mincextract +freesurfer/mni/bin/mincheader +freesurfer/mni/bin/minchistory +freesurfer/mni/bin/minclookup +freesurfer/mni/bin/mincmakescalar +freesurfer/mni/bin/mincmakevector +freesurfer/mni/bin/mincmath +freesurfer/mni/bin/minc_modify_header +freesurfer/mni/bin/mincpik +freesurfer/mni/bin/mincreshape +freesurfer/mni/bin/mincstats +freesurfer/mni/bin/minctoecat +freesurfer/mni/bin/minctoraw +freesurfer/mni/bin/mincview +freesurfer/mni/bin/mincwindow +freesurfer/mni/bin/mnc2nii +freesurfer/mni/bin/mritoself +freesurfer/mni/bin/ncdump +freesurfer/mni/bin/ncgen +freesurfer/mni/bin/nii2mnc +freesurfer/mni/bin/nu_estimate +freesurfer/mni/bin/nu_estimate_np_and_em~ +freesurfer/mni/bin/nu_evaluate +freesurfer/mni/bin/param2xfm +freesurfer/mni/bin/rand_param +freesurfer/mni/bin/rawtominc +freesurfer/mni/bin/register +freesurfer/mni/bin/resample_labels +freesurfer/mni/bin/sharpen_hist +freesurfer/mni/bin/sharpen_volume +freesurfer/mni/bin/spline_smooth +freesurfer/mni/bin/transformtags +freesurfer/mni/bin/upet2mnc +freesurfer/mni/bin/volume_hist +freesurfer/mni/bin/volume_stats +freesurfer/mni/bin/voxeltoworld +freesurfer/mni/bin/worldtovoxel +freesurfer/mni/bin/xcorr_vol +freesurfer/mni/bin/xfm2param +freesurfer/mni/bin/xfmconcat +freesurfer/mni/bin/xfminvert +freesurfer/mni/bin/xfmtool +freesurfer/mni/bin/zscore_vol +freesurfer/mni/data +freesurfer/mni/include +freesurfer/mni/mni.srcbuild.June2015.tgz +freesurfer/mni/share/man +freesurfer/mni/share/N3 +freesurfer/models +freesurfer/python/lib/python3.8/test +freesurfer/python/lib/python3.8/site-packages/caffe2 +freesurfer/python/lib/python3.8/site-packages/sklearn +freesurfer/python/lib/python3.8/site-packages/tensorflow +freesurfer/python/lib/python3.8/site-packages/torch +freesurfer/python/lib/python3.8/site-packages/**/tests +freesurfer/python/**/__pycache__ +freesurfer/python/share +freesurfer/SegmentNoLUT.txt +freesurfer/sessions +freesurfer/SetUpFreeSurfer.csh +freesurfer/SetUpFreeSurfer.sh +freesurfer/Simple_surface_labels2009.txt +freesurfer/sources.sh +freesurfer/subjects/bert +freesurfer/subjects/cvs_avg35 +freesurfer/subjects/cvs_avg35_inMNI152 +freesurfer/subjects/fsaverage3 +freesurfer/subjects/fsaverage4 +freesurfer/subjects/fsaverage_sym +freesurfer/subjects/lh.EC_average +freesurfer/subjects/README +freesurfer/subjects/rh.EC_average +freesurfer/subjects/sample-001.mgz +freesurfer/subjects/sample-002.mgz +freesurfer/subjects/V1_average +freesurfer/tkmeditParcColorsCMA +freesurfer/tktools +freesurfer/trctrain diff --git a/docker/files/neurodebian.gpg b/docker/files/neurodebian.gpg deleted file mode 100644 index c546d45d..00000000 --- a/docker/files/neurodebian.gpg +++ /dev/null @@ -1,71 +0,0 @@ ------BEGIN PGP PUBLIC 
KEY BLOCK----- -Version: GnuPG v1 - -mQGiBEQ7TOgRBADvaRsIZ3VZ6Qy7PlDpdMm97m0OfvouOj/HhjOM4M3ECbGn4cYh -vN1gK586s3sUsUcNQ8LuWvNsYhxYsVTZymCReJMEDxod0U6/z/oIbpWv5svF3kpl -ogA66Ju/6cZx62RiCSOkskI6A3Waj6xHyEo8AGOPfzbMoOOQ1TS1u9s2FwCgxziL -wADvKYlDZnWM03QtqIJVD8UEAOks9Q2OqFoqKarj6xTRdOYIBVEp2jhozZUZmLmz -pKL9E4NKGfixqxdVimFcRUGM5h7R2w7ORqXjCzpiPmgdv3jJLWDnmHLmMYRYQc8p -5nqo8mxuO3zJugxBemWoacBDd1MJaH7nK20Hsk9L/jvU/qLxPJotMStTnwO+EpsK -HlihA/9ZpvzR1QWNUd9nSuNR3byJhaXvxqQltsM7tLqAT4qAOJIcMjxr+qESdEbx -NHM5M1Y21ZynrsQw+Fb1WHXNbP79vzOxHoZR0+OXe8uUpkri2d9iOocre3NUdpOO -JHtl6cGGTFILt8tSuOVxMT/+nlo038JQB2jARe4B85O0tkPIPbQybmV1cm8uZGVi -aWFuLm5ldCBhcmNoaXZlIDxtaWNoYWVsLmhhbmtlQGdtYWlsLmNvbT6IRgQQEQgA -BgUCTVHJKwAKCRCNEUVjdcAkyOvzAJ0abJz+f2a6VZG1c9T8NHMTYh1atwCgt0EE -3ZZd/2in64jSzu0miqhXbOKISgQQEQIACgUCSotRlwMFAXgACgkQ93+NsjFEvg8n -JgCfWcdJbILBtpLZCocvOzlLPqJ0Fn0AoI4EpJRxoUnrtzBGUC1MqecU7WsDiGAE -ExECACAFAkqLUWcCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRCl0y8BJkml -qVklAJ4h2V6MdQkSAThF5c2Gkq6eSoIQYQCeM0DWyB9Bl+tTPSTYXwwZi2uoif20 -QmFwc3kuZ3NlLnVuaS1tYWdkZWJ1cmcuZGUgRGViaWFuIEFyY2hpdmUgPG1pY2hh -ZWwuaGFua2VAZ21haWwuY29tPohGBBARAgAGBQJEO03FAAoJEPd/jbIxRL4PU18A -n3tn7i4qdlMi8kHbYWFoabsKc9beAJ9sl/leZNCYNMGhz+u6BQgyeLKw94heBBMR -AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA -n27DvtZizNEbhz3wRUPQMiQjtqdvAJ9rS9YdPe5h5o5gHx3mw3BSkOttdYheBBMR -AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA -oLhwWL+E+2I9lrUf4Lf26quOK9vLAKC9ZpIF2tUirFFkBWnQvu13/TA0SokCHAQQ -AQIABgUCTSNBgQAKCRDAc9Iof/uem4NpEACQ8jxmaCaS/qk/Y4GiwLA5bvKosG3B -iARZ2v5UWqCZQ1tS56yKse/lCIzXQqU9BnYW6wOI2rvFf9meLfd8h96peG6oKscs -fbclLDIf68bBvGBQaD0VYFi/Fk/rxmTQBOCQ3AJZs8O5rIM4gPGE0QGvSZ1h7VRw -3Uyeg4jKXLIeJn2xEmOJgt3auAR2FyKbzHaX9JCoByJZ/eU23akNl9hgt7ePlpXo -74KNYC58auuMUhCq3BQDB+II4ERYMcmFp1N5ZG05Cl6jcaRRHDXz+Ax6DWprRI1+ -RH/Yyae6LmKpeJNwd+vM14aawnNO9h8IAQ+aJ3oYZdRhGyybbin3giJ10hmWveg/ -Pey91Nh9vBCHdDkdPU0s9zE7z/PHT0c5ccZRukxfZfkrlWQ5iqu3V064ku5f4PBy -8UPSkETcjYgDnrdnwqIAO+oVg/SFlfsOzftnwUrvwIcZlXAgtP6MEEAs/38e/JIN -g4VrpdAy7HMGEUsh6Ah6lvGQr+zBnG44XwKfl7e0uCYkrAzUJRGM5vx9iXvFMcMu -jv9EBNNBOU8/Y6MBDzGZhgaoeI27nrUvaveJXjAiDKAQWBLjtQjINZ8I9uaSGOul -8kpbFavE4eS3+KhISrSHe4DuAa3dk9zI+FiPvXY1ZyfQBtNpR+gYFY6VxMbHhY1U -lSLHO2eUIQLdYbRITmV1cm9EZWJpYW4gQXJjaGl2ZSBLZXkgPHBrZy1leHBwc3kt -bWFpbnRhaW5lcnNAbGlzdHMuYWxpb3RoLmRlYmlhbi5vcmc+iEYEEBEIAAYFAk1R -yQYACgkQjRFFY3XAJMgEWwCggx4Gqlcrt76TSMlbU94cESo55AEAoJ3asQEMpe8t -QUX+5aikw3z1AUoCiEoEEBECAAoFAkqf/3cDBQF4AAoJEPd/jbIxRL4PxyMAoKUI -RPWlHCj/+HSFfwhos68wcSwmAKChuC00qutDro+AOo+uuq6YoHXj+ohgBBMRAgAg -BQJKn/8bAhsDBgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQpdMvASZJpalDggCe -KF9KOgOPdQbFnKXl8KtHory4EEwAnA7jxgorE6kk2QHEXFSF8LzOOH4GiGMEExEC -ACMCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCSp//RgIZAQAKCRCl0y8BJkml -qekFAKCRyt4+FoCzmBbRUUP3Cr8PzH++IgCgkno4vdjsWdyAey8e0KpITTXMFrmJ -AhwEEAECAAYFAk0jQYEACgkQwHPSKH/7npsFfw/+P8B8hpM3+T1fgboBa4R32deu -n8m6b8vZMXwuo/awQtMpzjem8JGXSUQm8iiX4hDtjq6ZoPrlN8T4jNmviBt/F5jI -Jji/PYmhq+Zn9s++mfx+aF4IJrcHJWFkg/6kJzn4oSdl/YlvKf4VRCcQNtj4xV87 -GsdamnzU17XapLVMbSaVKh+6Af7ZLDerEH+iAq733HsYaTK+1xKmN7EFVXgS7bZ1 -9C4LTzc97bVHSywpT9yIrg9QQs/1kshfVIHDKyhjF6IwzSVbeGAIL3Oqo5zOMkWv -7JlEIkkhTyl+FETxNMTMYjAk+Uei3kRodneq3YBF2uFYSEzrXQgHAyn37geiaMYj -h8wu6a85nG1NS0SdxiZDIePmbvD9vWxFZUWYJ/h9ifsLivWcVXlvHoQ0emd+n2ai -FhAck2xsuyHgnGIZMHww5IkQdu/TMqvbcR6d8Xulh+C4Tq7ppy+oTLADSBKII++p -JQioYydRD529EUJgVlhyH27X6YAk3FuRD3zYZRYS2QECiKXvS665o3JRJ0ZSqNgv -YOom8M0zz6bI9grnUoivMI4o7ISpE4ZwffEd37HVzmraaUHDXRhkulFSf1ImtXoj -V9nNSM5p/+9eP7OioTZhSote6Vj6Ja1SZeRkXZK7BwqPbdO0VsYOb7G//ZiOlqs+ 
-paRr92G/pwBfj5Dq8EK5Ag0ERDtM9RAIAN0EJqBPvLN0tEin/y4Fe0R4n+E+zNXg -bBsq4WidwyUFy3h/6u86FYvegXwUqVS2OsEs5MwPcCVJOfaEthF7I89QJnP9Nfx7 -V5I9yFB53o9ii38BN7X+9gSjpfwXOvf/wIDfggxX8/wRFel37GRB7TiiABRArBez -s5x+zTXvT++WPhElySj0uY8bjVR6tso+d65K0UesvAa7PPWeRS+3nhqABSFLuTTT -MMbnVXCGesBrYHlFVXClAYrSIOX8Ub/UnuEYs9+hIV7U4jKzRF9WJhIC1cXHPmOh -vleAf/I9h/0KahD7HLYud40pNBo5tW8jSfp2/Q8TIE0xxshd51/xy4MAAwUH+wWn -zsYVk981OKUEXul8JPyPxbw05fOd6gF4MJ3YodO+6dfoyIl3bewk+11KXZQALKaO -1xmkAEO1RqizPeetoadBVkQBp5xPudsVElUTOX0pTYhkUd3iBilsCYKK1/KQ9KzD -I+O/lRsm6L9lc6rV0IgPU00P4BAwR+x8Rw7TJFbuS0miR3lP1NSguz+/kpjxzmGP -LyHJ+LVDYFkk6t0jPXhqFdUY6McUTBDEvavTGlVO062l9APTmmSMVFDsPN/rBes2 -rYhuuT+lDp+gcaS1UoaYCIm9kKOteQBnowX9V74Z+HKEYLtwILaSnNe6/fNSTvyj -g0z+R+sPCY4nHewbVC+ISQQYEQIACQUCRDtM9QIbDAAKCRCl0y8BJkmlqbecAJ9B -UdSKVg9H+fQNyP5sbOjj4RDtdACfXHrRHa2+XjJP0dhpvJ8IfvYnQsU= -=fAJZ ------END PGP PUBLIC KEY BLOCK----- diff --git a/env.yml b/env.yml new file mode 100644 index 00000000..721507ae --- /dev/null +++ b/env.yml @@ -0,0 +1,42 @@ +name: nitransforms +channels: + - https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/ + - conda-forge +# Update this ~yearly; last updated Jan 2024 +dependencies: + - python=3.11 + # Needed for svgo and bids-validator; consider moving to deno + - nodejs=20 + # Intel Math Kernel Library for numpy + - mkl=2023.2.0 + - mkl-service=2.4.0 + # git-annex for templateflow users with DataLad superdatasets + - git-annex=*=alldep* + # Base scientific python stack; required by FSL, so pinned here + - numpy=1.26 + - scipy=1.11 + - matplotlib=3.8 + - pandas=2.2 + - h5py=3.10 + # Dependencies compiled against numpy, best to stick with conda + - nitime=0.10 + - scikit-image=0.22 + - scikit-learn=1.4 + # Utilities + - graphviz=9.0 + - pandoc=3.1 + # Workflow dependencies: ANTs + - ants=2.5 + # Workflow dependencies: FSL (versions pinned in 6.0.7.7) + - fsl-bet2=2111.4 + - fsl-flirt=2111.2 + - fsl-fast4=2111.3 + - fsl-fugue=2201.4 + - fsl-mcflirt=2111.0 + - fsl-miscmaths=2203.2 + - fsl-topup=2203.2 + # - pip + # - pip: + # - -r requirements.txt +variables: + FSLOUTPUTTYPE: NIFTI_GZ \ No newline at end of file From 315aab9594eed979ed4479bf7830fe03ec44df8e Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 12:13:35 -0400 Subject: [PATCH 054/123] fix: select python without fiddling Co-authored-by: Mathias Goncalves --- .circleci/config.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8d0d6748..e5bb0a04 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,7 +46,9 @@ jobs: - run: name: Install test data from GIN command: | - export PATH=/opt/circleci/.pyenv/versions/3.12.2/bin/:/opt/circleci/git-annex.linux:$PATH + export PATH=/opt/circleci/git-annex.linux:$PATH + pyenv local 3 + eval "$(pyenv init --path)" mkdir -p /tmp/data cd /tmp/data datalad install -r https://gin.g-node.org/oesteban/nitransforms-tests @@ -129,7 +131,7 @@ jobs: echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IGxpY2Vuc2UudHh0Cg==" | base64 -d | sh - run: name: Get codecov - command: python -m pip install codecov + command: python3 -m pip install codecov - run: name: Run unit tests no_output_timeout: 2h @@ -147,7 +149,7 @@ jobs: name: Submit unit test coverage command: | cd /tmp/src/nitransforms - python -m codecov --file /tmp/tests/summaries/unittests.xml \ + python3 -m codecov --file /tmp/tests/summaries/unittests.xml \ --flags unittests -e CIRCLE_JOB - run: 
name: Clean up tests directory @@ -176,7 +178,7 @@ jobs: source /tmp/buildenv/bin/activate python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 6.2" \ "pip>=10.0.1" twine docutils - python setup.py sdist bdist_wheel + python3 -m build twine check dist/nitransforms* - store_artifacts: path: /tmp/src/nitransforms/dist From c6b63b29b67307259e92d8eb6eb9bdd8613dc8eb Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 13:24:41 -0400 Subject: [PATCH 055/123] fix: path to wheel --- .circleci/config.yml | 5 +++-- Dockerfile | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e5bb0a04..47b0e00e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -137,7 +137,8 @@ jobs: no_output_timeout: 2h command: | mkdir -p /tmp/tests/{artifacts,summaries} - docker run -u $( id -u ) -it --rm -w /src/nitransforms \ + docker run -u $( id -u ) -it --rm \ + -w /src/nitransforms -v $PWD:/src/nitransforms \ -v /tmp/data/nitransforms-tests:/data -e TEST_DATA_HOME=/data \ -e COVERAGE_FILE=/tmp/summaries/.pytest.coverage \ -v /tmp/fslicense/license.txt:/opt/freesurfer/license.txt:ro \ @@ -176,7 +177,7 @@ jobs: command: | python3 -m venv /tmp/buildenv source /tmp/buildenv/bin/activate - python3 -m pip install "setuptools >= 45.0" wheel "setuptools_scm[toml] >= 6.2" \ + python3 -m pip install "setuptools >= 45.0" build wheel "setuptools_scm[toml] >= 6.2" \ "pip>=10.0.1" twine docutils python3 -m build twine check dist/nitransforms* diff --git a/Dockerfile b/Dockerfile index a804bf11..57c526b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -199,7 +199,7 @@ ENV MKL_NUM_THREADS=1 \ # CRITICAL: Make sure python setup.py --version has been run at least once # outside the container, with access to the git history. COPY --from=src /src/dist/*.whl . 
-RUN python -m pip install --no-cache-dir $( ls /src/dist/*.whl )[all] +RUN python -m pip install --no-cache-dir $( ls *.whl )[all] RUN find $HOME -type d -exec chmod go=u {} + && \ From ed810878d2604e11496e736996726751d51ba676 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 17 May 2024 14:43:37 -0400 Subject: [PATCH 056/123] NF: add surface resampling and surfacecoordinate transform --- nitransforms/base.py | 18 ++ nitransforms/surface.py | 281 ++++++++++++++++++++++++++--- nitransforms/tests/test_base.py | 22 ++- nitransforms/tests/test_surface.py | 108 ++++++----- 4 files changed, 361 insertions(+), 68 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 8d80b25f..200fe20b 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -15,6 +15,7 @@ from nibabel import funcs as _nbfuncs from nibabel.nifti1 import intent_codes as INTENT_CODES from nibabel.cifti2 import Cifti2Image +import nibabel as nb from scipy import ndimage as ndi EQUALITY_TOL = 1e-5 @@ -116,6 +117,23 @@ def __init__(self, dataset): raise ValueError("Dataset could not be interpreted as an irregular sample.") + @classmethod + def from_arrays(cls, coordinates, triangles): + darrays = [ + nb.gifti.GiftiDataArray( + coordinates.astype(np.float32), + intent=nb.nifti1.intent_codes['NIFTI_INTENT_POINTSET'], + datatype=nb.nifti1.data_type_codes['NIFTI_TYPE_FLOAT32'], + ), + nb.gifti.GiftiDataArray( + triangles.astype(np.int32), + intent=nb.nifti1.intent_codes['NIFTI_INTENT_TRIANGLE'], + datatype=nb.nifti1.data_type_codes['NIFTI_TYPE_INT32'], + ), + ] + gii = nb.gifti.GiftiImage(darrays=darrays) + return cls(gii) + class ImageGrid(SampledSpatialData): """Class to represent spaces of gridded data (images).""" diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 2cc3913b..f01e902e 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -8,6 +8,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Surface transforms.""" +import warnings import h5py import numpy as np import scipy.sparse as sparse @@ -16,7 +17,7 @@ ) import nibabel as nb from scipy.spatial import KDTree -from nitransforms.base import TransformBase +from scipy.spatial.distance import cdist class SurfaceTransformBase(): @@ -28,9 +29,9 @@ def __init__(self, reference, moving): self._moving = moving def __eq__(self, other): - ref_coords_eq = (self.reference._coordinates == other.reference._coordinates).all() + ref_coords_eq = (self.reference._coords == other.reference._coords).all() ref_tris_eq = (self.reference._triangles == other.reference._triangles).all() - mov_coords_eq = (self.moving._coordinates == other.moving._coordinates).all() + mov_coords_eq = (self.moving._coords == other.moving._coords).all() mov_tris_eq = (self.moving._triangles == other.moving._triangles).all() return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq @@ -58,7 +59,7 @@ def from_filename(cls, reference_path, moving_path): moving = SurfaceMesh(nb.load(moving_path)) return cls(reference, moving) -class SurfaceIndexTransform(SurfaceTransformBase): +class SurfaceCoordinateTransform(SurfaceTransformBase): """Represents surface transformations in which the indices correspond and the coordinates differ.""" __slots__ = ("_reference", "_moving") @@ -83,7 +84,10 @@ def map(self, x, inverse=False): return dest._coords[matches] def __add__(self, other): - return self.__class__(self.reference, other.moving) + if isinstance(other, SurfaceCoordinateTransform): + return self.__class__(self.reference, 
other.moving) + else: + raise NotImplementedError @property def reference(self): @@ -101,24 +105,87 @@ def moving(self): def moving(self, surface): self._moving = SurfaceMesh(surface) -class SurfaceCoordinateTransform(SurfaceTransformBase): - """Represents transforms between surface spaces.""" +class SurfaceResampler(SurfaceTransformBase): + """Represents transformations in which the coordinate space remains the same and the indicies change.""" - __slots__ = ("mat",) + __slots__ = ("mat", 'interpolation_method') - def __init__(self, mat): + def __init__(self, reference, moving, interpolation_method='barycentric', mat=None): """Initialize the transform. Parameters ---------- + reference: spherical surface of the reference space. + Output will have number of indices equal to the number of indicies in this surface. + Both reference and moving should be in the same coordinate space. + moving: spherical surface that will be resampled. + Both reference and moving should be in the same coordinate space. mat : array-like, shape (nv1, nv2) Sparse matrix representing the transform. + interpolation_method : str + Only barycentric is currently implemented """ - super().__init__() - if isinstance(mat, sparse.csr_array): - self.mat = mat + super().__init__(reference, moving) + self.interpolation_method = interpolation_method + + # TODO: should we deal with the case where reference and moving are the same? + + # we're calculating the interpolation in the init so that we can ensure + # that it only has to be calculated once and will always be saved with the + # transform + if mat is None: + r_tree = KDTree(self.reference._coords) + m_tree = KDTree(self.moving._coords) + kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) + + # invert the triangles to generate a lookup table from vertices to triangle index + tri_lut = dict() + for i, idxs in enumerate(self.moving._triangles): + for x in idxs: + if not x in tri_lut: + tri_lut[x] = [i] + else: + tri_lut[x].append(i) + + # calculate the barycentric interpolation weights + bc_weights = [] + enclosing = [] + for sidx, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + close_tris = _find_close_tris(kmrv, tri_lut, self.moving) + ww, ee = _find_weights(point, close_tris, m_tree) + bc_weights.append(ww) + enclosing.append(ee) + + # build sparse matrix + # commenting out code for barycentric nearest neighbor + #bary_nearest = [] + mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) + for s_ix, dd in enumerate(bc_weights): + for k, v in dd.items(): + mat[s_ix, k] = v + # bary_nearest.append(np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()]) + # bary_nearest = np.array(bary_nearest) + # transpose so that number of out vertices is columns + self.mat = sparse.csr_array(mat.T) else: - self.mat = sparse.csr_array(mat) + if isinstance(mat, sparse.csr_array): + self.mat = mat + else: + self.mat = sparse.csr_array(mat) + + def map(self, x, inverse=False): + return x + + def __add__(self, other): + + if isinstance(other, SurfaceResampler) and (other.interpolation_method == self.interpolation_method): + return self.__class__(self.reference, other.moving, interpolation_method=self.interpolation_method) + else: + raise NotImplementedError + + + def __invert__(self): + return self.__class__(self.moving, self.reference, interpolation_method=self.interpolation_method) def apply(self, x, inverse=False, normalize="element"): """Apply the transform to surface data. 
@@ -165,11 +232,16 @@ def apply(self, x, inverse=False, normalize="element"): def _to_hdf5(self, x5_root): """Write transform to HDF5 file.""" xform = x5_root.create_group("Transform") - xform.attrs["Type"] = "surface" - xform.create_dataset("data", data=self.mat.data) - xform.create_dataset("indices", data=self.mat.indices) - xform.create_dataset("indptr", data=self.mat.indptr) - xform.create_dataset("shape", data=self.mat.shape) + xform.attrs["Type"] = "SurfaceResampling" + xform.attrs['interpolation_method'] = self.interpolation_method + xform.create_dataset("mat_data", data=self.mat.data) + xform.create_dataset("mat_indices", data=self.mat.indices) + xform.create_dataset("mat_indptr", data=self.mat.indptr) + xform.create_dataset("mat_shape", data=self.mat.shape) + xform.create_dataset("reference_coordinates", data=self.reference._coords) + xform.create_dataset("reference_triangles", data=self.reference._triangles) + xform.create_dataset("moving_coordinates", data=self.moving._coords) + xform.create_dataset("moving_triangles", data=self.moving._triangles) def to_filename(self, filename, fmt=None): """Store the transform.""" @@ -177,8 +249,9 @@ def to_filename(self, filename, fmt=None): fmt = "npz" if filename.endswith(".npz") else "X5" if fmt == "npz": - sparse.save_npz(filename, self.mat) - return filename + raise NotImplementedError + # sparse.save_npz(filename, self.mat) + # return filename with h5py.File(filename, "w") as out_file: out_file.attrs["Format"] = "X5" @@ -195,7 +268,8 @@ def from_filename(cls, filename, fmt=None): fmt = "npz" if filename.endswith(".npz") else "X5" if fmt == "npz": - return cls(sparse.load_npz(filename)) + raise NotImplementedError + #return cls(sparse.load_npz(filename)) if fmt != "X5": raise ValueError("Only npz and X5 formats are supported.") @@ -204,7 +278,166 @@ def from_filename(cls, filename, fmt=None): assert f.attrs["Format"] == "X5" xform = f["/0/Transform"] mat = sparse.csr_matrix( - (xform["data"][()], xform["indices"][()], xform["indptr"][()]), - shape=xform["shape"][()], + (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), + shape=xform["mat_shape"][()], ) - return cls(mat) + reference = SurfaceMesh.from_arrays(xform['reference_coordinates'], xform['reference_triangles']) + moving = SurfaceMesh.from_arrays(xform['moving_coordinates'], xform['moving_triangles']) + interpolation_method = xform.attrs['interpolation_method'] + return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) + + +def _pointsToTriangles(points, triangles): + """Implementation that vectorizes project of a point to a set of triangles. + from: https://stackoverflow.com/a/32529589 + """ + with np.errstate(all='ignore'): + # Unpack triangle points + p0, p1, p2 = np.asarray(triangles).swapaxes(0, 1) + + # Calculate triangle edges + e0 = p1 - p0 + e1 = p2 - p0 + a = np.einsum('...i,...i', e0, e0) + b = np.einsum('...i,...i', e0, e1) + c = np.einsum('...i,...i', e1, e1) + + # Calculate determinant and denominator + det = a * c - b * b + invDet = 1. 
/ det + denom = a - 2 * b + c + + # Project to the edges + p = p0 - points[:, np.newaxis] + d = np.einsum('...i,...i', e0, p) + e = np.einsum('...i,...i', e1, p) + u = b * e - c * d + v = b * d - a * e + + # Calculate numerators + bd = b + d + ce = c + e + numer0 = (ce - bd) / denom + numer1 = (c + e - b - d) / denom + da = -d / a + ec = -e / c + + # Vectorize test conditions + m0 = u + v < det + m1 = u < 0 + m2 = v < 0 + m3 = d < 0 + m4 = (a + d > b + e) + m5 = ce > bd + + t0 = m0 & m1 & m2 & m3 + t1 = m0 & m1 & m2 & ~m3 + t2 = m0 & m1 & ~m2 + t3 = m0 & ~m1 & m2 + t4 = m0 & ~m1 & ~m2 + t5 = ~m0 & m1 & m5 + t6 = ~m0 & m1 & ~m5 + t7 = ~m0 & m2 & m4 + t8 = ~m0 & m2 & ~m4 + t9 = ~m0 & ~m1 & ~m2 + + u = np.where(t0, np.clip(da, 0, 1), u) + v = np.where(t0, 0, v) + u = np.where(t1, 0, u) + v = np.where(t1, 0, v) + u = np.where(t2, 0, u) + v = np.where(t2, np.clip(ec, 0, 1), v) + u = np.where(t3, np.clip(da, 0, 1), u) + v = np.where(t3, 0, v) + u *= np.where(t4, invDet, 1) + v *= np.where(t4, invDet, 1) + u = np.where(t5, np.clip(numer0, 0, 1), u) + v = np.where(t5, 1 - u, v) + u = np.where(t6, 0, u) + v = np.where(t6, 1, v) + u = np.where(t7, np.clip(numer1, 0, 1), u) + v = np.where(t7, 1 - u, v) + u = np.where(t8, 1, u) + v = np.where(t8, 0, v) + u = np.where(t9, np.clip(numer1, 0, 1), u) + v = np.where(t9, 1 - u, v) + + # Return closest points + return (p0.T + u[:, np.newaxis] * e0.T + v[:, np.newaxis] * e1.T).swapaxes(2, 1) + + +def _barycentric_weights(vecs, coords): + """Compute the weights for barycentric interpolation. + + Parameters + ---------- + vecs : ndarray of shape (6, 3) + The 6 vectors used to compute barycentric weights. + a, e1, e2, + np.cross(e1, e2), + np.cross(e2, a), + np.cross(a, e1) + coords : ndarray of shape (3, ) + + Returns + ------- + (w, u, v, t) : tuple of float + ``w``, ``u``, and ``v`` are the weights of the three vertices of the + triangle, respectively. ``t`` is the scale that needs to be multiplied + to ``coords`` to make it in the same plane as the three vertices. + + From: https://github.com/neuroboros/neuroboros/blob/f2a2efb914e783add2bf06e0f3715236d3d8550e/src/neuroboros/surface/_barycentric.pyx#L9-L47 + """ + det = coords[0] * vecs[3, 0] + coords[1] * vecs[3, 1] + coords[2] * vecs[3, 2] + if det == 0: + if vecs[3, 0] == 0 and vecs[3, 1] == 0 and vecs[3, 2] == 0: + warnings.warn("Zero cross product of two edges: " + "The three vertices are in the same line.") + else: + print(vecs[3]) + y = coords - vecs[0] + u, v = np.linalg.lstsq(vecs[1:3].T, y, rcond=None)[0] + t = 1. + else: + uu = coords[0] * vecs[4, 0] + coords[1] * vecs[4, 1] + coords[2] * vecs[4, 2] + vv = coords[0] * vecs[5, 0] + coords[1] * vecs[5, 1] + coords[2] * vecs[5, 2] + u = uu / det + v = vv / det + tt = vecs[0, 0] * vecs[3, 0] + vecs[0, 1] * vecs[3, 1] + vecs[0, 2] * vecs[3, 2] + t = tt / det + w = 1. 
- (u + v) + return w, u, v, t + + +def _find_close_tris(kdsv, tri_lut, surface): + tris = [] + for kk in kdsv: + tris.extend(tri_lut[kk]) + close_tri_verts = surface._triangles[np.unique(tris)] + close_tris = surface._coords[close_tri_verts] + return close_tris + + +def _find_weights(point, close_tris, d_tree): + point = point[np.newaxis, :] + tri_dists = cdist(point, _pointsToTriangles(point, close_tris).squeeze()) + closest_tri = close_tris[(tri_dists == tri_dists.min()).squeeze()] + # make sure a single closest triangle was found + if closest_tri.shape[0] != 1: + # in the event of a tie (which can happen) + # just take the first triangle + closest_tri = closest_tri[0] + + closest_tri = closest_tri.squeeze() + # Make sure point is actually inside triangle + enclosing = True + if ((point > closest_tri).sum(0) != 3).all(): + enclosing = False + _, ct_idxs = d_tree.query(closest_tri) + a = closest_tri[0] + e1 = closest_tri[1] - a + e2 = closest_tri[2] - a + vecs = np.vstack([a, e1, e2, np.cross(e1, e2), np.cross(e2, a), np.cross(a, e1)]) + res = dict() + res[ct_idxs[0]], res[ct_idxs[1]], res[ct_idxs[2]], _ = _barycentric_weights(vecs, point.squeeze()) + return res, enclosing \ No newline at end of file diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 07a7e4ec..06dd9efa 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -4,7 +4,7 @@ import pytest import h5py -from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase +from ..base import SpatialReference, SampledSpatialData, ImageGrid, TransformBase, SurfaceMesh from .. import linear as nitl @@ -161,3 +161,23 @@ def test_concatenation(testdata_path): x = [(0.0, 0.0, 0.0), (1.0, 1.0, 1.0), (-1.0, -1.0, -1.0)] assert np.all((aff + nitl.Affine())(x) == x) assert np.all((aff + nitl.Affine())(x, inverse=True) == x) + + +def test_SurfaceMesh(testdata_path): + surf_path = testdata_path / "sub-200148_hemi-R_pial.surf.gii" + shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + img_path = testdata_path / "bold.nii.gz" + + mesh = SurfaceMesh(nb.load(surf_path)) + exp_coords_shape = (249277, 3) + exp_tris_shape = (498550, 3) + assert mesh._coords.shape == exp_coords_shape + assert mesh._triangles.shape == exp_tris_shape + assert mesh._npoints == exp_coords_shape[0] + assert mesh._ndim == exp_coords_shape[1] + + with pytest.raises(ValueError): + SurfaceMesh(nb.load(img_path)) + + with pytest.raises(TypeError): + SurfaceMesh(nb.load(shape_path)) \ No newline at end of file diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index e90fe5ea..4b45d023 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -2,57 +2,79 @@ import tempfile import numpy as np -import scipy.sparse as sparse +import nibabel as nb +from nitransforms.base import SurfaceMesh +from nitransforms.surface import SurfaceCoordinateTransform, SurfaceResampler -from nitransforms.surface import SurfaceCoordinateTransform +# def test_surface_transform_npz(): +# mat = sparse.random(10, 10, density=0.5) +# xfm = SurfaceCoordinateTransform(mat) +# fn = tempfile.mktemp(suffix=".npz") +# print(fn) +# xfm.to_filename(fn) +# +# xfm2 = SurfaceCoordinateTransform.from_filename(fn) +# try: +# assert xfm.mat.shape == xfm2.mat.shape +# np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) +# np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) +# 
np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) +# except Exception: +# os.remove(fn) +# raise +# os.remove(fn) -def test_surface_transform_x5(): - mat = sparse.random(10, 10, density=0.5) - xfm = SurfaceCoordinateTransform(mat) - fn = tempfile.mktemp(suffix=".h5") - print(fn) - xfm.to_filename(fn) - xfm2 = SurfaceCoordinateTransform.from_filename(fn) - try: - assert xfm.mat.shape == xfm2.mat.shape - np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) - np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) - np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) - except Exception: - os.remove(fn) - raise - os.remove(fn) +# def test_surface_transform_normalization(): +# mat = np.random.uniform(size=(20, 10)) +# xfm = SurfaceCoordinateTransform(mat) +# x = np.random.uniform(size=(5, 20)) +# y_element = xfm.apply(x, normalize="element") +# np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1)) +# y_sum = xfm.apply(x, normalize="sum") +# np.testing.assert_allclose(y_sum.sum(axis=1), x.sum(axis=1)) +# y_none = xfm.apply(x, normalize="none") +# assert y_none.sum() != y_element.sum() +# assert y_none.sum() != y_sum.sum() + +def test_SurfaceResampler(testdata_path): + dif_tol = 0.001 + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + ref_resampled_thickness_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" -def test_surface_transform_npz(): - mat = sparse.random(10, 10, density=0.5) - xfm = SurfaceCoordinateTransform(mat) - fn = tempfile.mktemp(suffix=".npz") - print(fn) - xfm.to_filename(fn) + fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + subj_thickness = nb.load(shape_path) - xfm2 = SurfaceCoordinateTransform.from_filename(fn) + reference = fslr_sphere + moving = sphere_reg + # compare results to what connecome workbench produces + resampling = SurfaceResampler(reference, moving) + resampled_thickness = resampling.apply(subj_thickness.agg_data(), normalize='element') + ref_resampled = nb.load(ref_resampled_thickness_path).agg_data() + + max_dif = np.abs(resampled_thickness.astype(np.float32) - ref_resampled).max() + assert max_dif < dif_tol + + # test file io + fn = tempfile.mktemp(suffix=".h5") try: - assert xfm.mat.shape == xfm2.mat.shape - np.testing.assert_array_equal(xfm.mat.data, xfm2.mat.data) - np.testing.assert_array_equal(xfm.mat.indices, xfm2.mat.indices) - np.testing.assert_array_equal(xfm.mat.indptr, xfm2.mat.indptr) + resampling.to_filename(fn) + resampling2 = SurfaceResampler.from_filename(fn) + + assert resampling2 == resampling2 + assert np.all(resampling2.reference._coords == resampling.reference._coords) + assert np.all(resampling2.reference._triangles == resampling.reference._triangles) + assert np.all(resampling2.reference._coords == resampling.reference._coords) + assert np.all(resampling2.moving._triangles == resampling.moving._triangles) + + resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness2 == resampled_thickness) except Exception: os.remove(fn) raise - os.remove(fn) - - -def test_surface_transform_normalization(): - mat = np.random.uniform(size=(20, 10)) - xfm = 
SurfaceCoordinateTransform(mat) - x = np.random.uniform(size=(5, 20)) - y_element = xfm.apply(x, normalize="element") - np.testing.assert_array_less(y_element.sum(axis=1), x.sum(axis=1)) - y_sum = xfm.apply(x, normalize="sum") - np.testing.assert_allclose(y_sum.sum(axis=1), x.sum(axis=1)) - y_none = xfm.apply(x, normalize="none") - assert y_none.sum() != y_element.sum() - assert y_none.sum() != y_sum.sum() + os.remove(fn) \ No newline at end of file From cbafa5d86acc97fddb9d225b5278e30233ed81ac Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 17 May 2024 15:16:44 -0400 Subject: [PATCH 057/123] NF: surface resampler can load from surfaces --- nitransforms/surface.py | 54 ++++++++++++++++++------------ nitransforms/tests/test_surface.py | 15 +++++++-- 2 files changed, 45 insertions(+), 24 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index f01e902e..1f6a52b9 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -262,29 +262,39 @@ def to_filename(self, filename, fmt=None): return filename @classmethod - def from_filename(cls, filename, fmt=None): + def from_filename(cls, filename=None, reference_file=None, moving_file=None, fmt=None, interpolation_method=None): """Load transform from file.""" - if fmt is None: - fmt = "npz" if filename.endswith(".npz") else "X5" - - if fmt == "npz": - raise NotImplementedError - #return cls(sparse.load_npz(filename)) - - if fmt != "X5": - raise ValueError("Only npz and X5 formats are supported.") - - with h5py.File(filename, "r") as f: - assert f.attrs["Format"] == "X5" - xform = f["/0/Transform"] - mat = sparse.csr_matrix( - (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), - shape=xform["mat_shape"][()], - ) - reference = SurfaceMesh.from_arrays(xform['reference_coordinates'], xform['reference_triangles']) - moving = SurfaceMesh.from_arrays(xform['moving_coordinates'], xform['moving_triangles']) - interpolation_method = xform.attrs['interpolation_method'] - return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) + if filename is None: + if reference_file is None or moving_file is None: + raise ValueError("You must pass either a X5 file or a pair of reference and moving surfaces.") + else: + if interpolation_method is None: + interpolation_method='barycentric' + return cls(SurfaceMesh(nb.load(reference_file)), + SurfaceMesh(nb.load(moving_file)), + interpolation_method=interpolation_method) + else: + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + #return cls(sparse.load_npz(filename)) + + if fmt != "X5": + raise ValueError("Only npz and X5 formats are supported.") + + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + mat = sparse.csr_matrix( + (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), + shape=xform["mat_shape"][()], + ) + reference = SurfaceMesh.from_arrays(xform['reference_coordinates'], xform['reference_triangles']) + moving = SurfaceMesh.from_arrays(xform['moving_coordinates'], xform['moving_triangles']) + interpolation_method = xform.attrs['interpolation_method'] + return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) def _pointsToTriangles(points, triangles): diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 4b45d023..32f8d4a1 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -66,7 
+66,7 @@ def test_SurfaceResampler(testdata_path): resampling.to_filename(fn) resampling2 = SurfaceResampler.from_filename(fn) - assert resampling2 == resampling2 + assert resampling2 == resampling assert np.all(resampling2.reference._coords == resampling.reference._coords) assert np.all(resampling2.reference._triangles == resampling.reference._triangles) assert np.all(resampling2.reference._coords == resampling.reference._coords) @@ -77,4 +77,15 @@ def test_SurfaceResampler(testdata_path): except Exception: os.remove(fn) raise - os.remove(fn) \ No newline at end of file + os.remove(fn) + + # test loading from surfaces + resampling3 = SurfaceResampler.from_filename(reference_file=fslr_sphere_path, + moving_file=sphere_reg_path) + assert resampling3 == resampling + assert np.all(resampling3.reference._coords == resampling.reference._coords) + assert np.all(resampling3.reference._triangles == resampling.reference._triangles) + assert np.all(resampling3.reference._coords == resampling.reference._coords) + assert np.all(resampling3.moving._triangles == resampling.moving._triangles) + resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness3 == resampled_thickness) \ No newline at end of file From e68c81bb301e5eb1f7c61761a91d2e6e860fbd5f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 16:32:35 -0400 Subject: [PATCH 058/123] fix: do not transpose (see scipy.ndimage.map_coordinates docs) --- nitransforms/nonlinear.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 4e0d0580..17befc8a 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -160,7 +160,6 @@ def map(self, x, inverse=False): if inverse is True: raise NotImplementedError - x = np.array(x) ijk = self.reference.index(x) indexes = np.round(ijk).astype("int") @@ -172,7 +171,7 @@ def map(self, x, inverse=False): tuple( map_coordinates( self._field[..., i], - ijk.T, + ijk, order=3, mode="constant", cval=np.nan, @@ -183,7 +182,7 @@ def map(self, x, inverse=False): ).T # Set NaN values back to the original coordinates value = no displacement - new_map[np.isnan(new_map)] = x[np.isnan(new_map)] + new_map[np.isnan(new_map)] = np.array(x)[np.isnan(new_map)] return new_map def __matmul__(self, b): From 92d15e14cf4cf877bec4ef971a5d8b9ecde43c92 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 21:53:33 -0400 Subject: [PATCH 059/123] fix: update test using ``xfm.apply()`` --- nitransforms/tests/test_manip.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py index 6dee540e..b7f6a6e4 100644 --- a/nitransforms/tests/test_manip.py +++ b/nitransforms/tests/test_manip.py @@ -14,6 +14,7 @@ RMSE_TOL, APPLY_NONLINEAR_CMD, ) +from nitransforms.resampling import apply FMT = {"lta": "fs", "tfm": "itk"} @@ -54,7 +55,7 @@ def test_itk_h5(tmp_path, testdata_path): assert exit_code == 0 sw_moved = nb.load("resampled.nii.gz") - nt_moved = xfm.apply(img_fname, order=0) + nt_moved = apply(xfm, img_fname, order=0) nt_moved.to_filename("nt_resampled.nii.gz") diff = sw_moved.get_fdata() - nt_moved.get_fdata() # A certain tolerance is necessary because of resampling at borders From 08dc3a398ae9cb01389acad9afd5f873d722da41 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 17 May 2024 21:57:20 -0400 Subject: [PATCH 060/123] fix: update test using ``xfm.apply()`` --- 
nitransforms/tests/test_manip.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py index 6dee540e..b7f6a6e4 100644 --- a/nitransforms/tests/test_manip.py +++ b/nitransforms/tests/test_manip.py @@ -14,6 +14,7 @@ RMSE_TOL, APPLY_NONLINEAR_CMD, ) +from nitransforms.resampling import apply FMT = {"lta": "fs", "tfm": "itk"} @@ -54,7 +55,7 @@ def test_itk_h5(tmp_path, testdata_path): assert exit_code == 0 sw_moved = nb.load("resampled.nii.gz") - nt_moved = xfm.apply(img_fname, order=0) + nt_moved = apply(xfm, img_fname, order=0) nt_moved.to_filename("nt_resampled.nii.gz") diff = sw_moved.get_fdata() - nt_moved.get_fdata() # A certain tolerance is necessary because of resampling at borders From 90e1157c90aff4f97429cb80821b3f7a0dd0a6b1 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 20 May 2024 09:53:34 +0200 Subject: [PATCH 061/123] fix: define ``ndim`` in transforms chains --- nitransforms/manip.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nitransforms/manip.py b/nitransforms/manip.py index 233f5adf..b30fd646 100644 --- a/nitransforms/manip.py +++ b/nitransforms/manip.py @@ -67,6 +67,11 @@ def __len__(self): """Enable using len().""" return len(self.transforms) + @property + def ndim(self): + """Get the number of dimensions.""" + return max(x.ndim for x in self._transforms) + @property def transforms(self): """Get the internal list of transforms.""" From 19f82d93b988a08b24266c007f117e810823e959 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 20 May 2024 09:58:51 +0200 Subject: [PATCH 062/123] fix: flexibilize test oracle precision by using ellipsis --- nitransforms/nonlinear.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 17befc8a..9c29c53c 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -317,11 +317,11 @@ def map(self, x, inverse=False): -------- >>> xfm = BSplineFieldTransform(test_dir / "someones_bspline_coefficients.nii.gz") >>> xfm.reference = test_dir / "someones_anatomy.nii.gz" - >>> xfm.map([-6.5, -36., -19.5]).tolist() - [[-6.5, -31.476097418406784, -19.5]] + >>> xfm.map([-6.5, -36., -19.5]).tolist() # doctest: +ELLIPSIS + [[-6.5, -31.476097418406..., -19.5]] - >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist() - [[-6.5, -31.476097418406784, -19.5], [-1.0, -3.8072675377121996, -11.25]] + >>> xfm.map([[-6.5, -36., -19.5], [-1., -41.5, -11.25]]).tolist() # doctest: +ELLIPSIS + [[-6.5, -31.4760974184..., -19.5], [-1.0, -3.807267537712..., -11.25]] """ vfunc = partial( From 5241f99a17c926b586c119e348f31911b31ce330 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 20 May 2024 10:07:26 +0200 Subject: [PATCH 063/123] fix: replace all remaining ``xfm.apply()`` calls --- .../Reading and Writing transforms.ipynb | 11 +- docs/notebooks/isbi2020.ipynb | 5073 ++++++++++++++++- nitransforms/tests/test_nonlinear.py | 6 +- 3 files changed, 5072 insertions(+), 18 deletions(-) diff --git a/docs/notebooks/Reading and Writing transforms.ipynb b/docs/notebooks/Reading and Writing transforms.ipynb index 61ea269f..c0e85de2 100644 --- a/docs/notebooks/Reading and Writing transforms.ipynb +++ b/docs/notebooks/Reading and Writing transforms.ipynb @@ -46,6 +46,7 @@ "import numpy as np\n", "import nibabel as nb\n", "import nitransforms as nt\n", + "from nitransforms.resampling import apply\n", "\n", "cwd = TemporaryDirectory()\n", "os.chdir(cwd.name)\n", @@ -263,7 
+264,7 @@ "metadata": {}, "outputs": [], "source": [ - "moved = xfm.apply(nii, order=0)\n", + "moved = apply(xfm, nii, order=0)\n", "moved.to_filename('moved-nb.nii.gz')" ] }, @@ -741,7 +742,7 @@ "outputs": [], "source": [ "xfm.reference = oblique\n", - "moved_oblique = xfm.apply(las_anatomy)" + "moved_oblique = apply(xfm, las_anatomy)" ] }, { @@ -895,7 +896,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -909,9 +910,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.11.8" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/docs/notebooks/isbi2020.ipynb b/docs/notebooks/isbi2020.ipynb index 80442dbe..903d86e7 100644 --- a/docs/notebooks/isbi2020.ipynb +++ b/docs/notebooks/isbi2020.ipynb @@ -34,7 +34,8 @@ "from pathlib import Path\n", "import nibabel as nb\n", "from niworkflows.viz.notebook import display\n", - "import nitransforms as nt" + "import nitransforms as not\n", + "from nitransforms.resampling import apply" ] }, { @@ -161,7 +162,7 @@ } ], "source": [ - "resampled_in_t1 = identity_xfm.apply(bold_nii)\n", + "resampled_in_t1 = apply(identity_xfm, bold_nii)\n", "print(resampled_in_t1.affine)\n", "print(resampled_in_t1.shape)" ] @@ -180,7 +181,1695 @@ "outputs": [ { "data": { - "image/svg+xml": "\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
[... SVG image data elided: per the hunk header above (@@ -180,7 +181,1695 @@), the cell's single-line "image/svg+xml" output string is replaced by the notebook's SVG output stored as a line-split array (roughly 1,690 added lines); the SVG markup itself is omitted here ...]
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -231,7 +1920,7 @@ } ], "source": [ - "moved_to_t1 = t1w_to_bold_xfm.apply(bold_nii)\n", + "moved_to_t1 = apply(t1w_to_bold_xfm, bold_nii)\n", "print(moved_to_t1.affine)\n", "print(moved_to_t1.shape)" ] @@ -243,7 +1932,1695 @@ "outputs": [ { "data": { - "image/svg+xml": "\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
[... diff lines replacing this cell's SVG figure output (old single-string SVG removed, re-rendered SVG list added) omitted ...]
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -280,7 +3657,1683 @@ "outputs": [ { "data": { - "image/svg+xml": "\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
[... diff lines replacing this cell's SVG figure output (old single-string SVG removed, re-rendered SVG list added) omitted ...]
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + " \n", + " \n", + "" + ], "text/plain": [ "" ] @@ -290,7 +5343,7 @@ } ], "source": [ - "display(bold_nii, bold_to_t1w_xfm.apply(t1w_nii))" + "display(bold_nii, apply(bold_to_t1w_xfm, t1w_nii))" ] }, { @@ -315,7 +5368,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -329,9 +5382,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.11.8" } }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index cfaa12c2..24d1f83e 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -169,7 +169,7 @@ def test_displacements_field1( exit_code = check_call([cmd], shell=True) assert exit_code == 0 sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - nt_moved_mask = xfm.apply(msk, order=0) + nt_moved_mask = apply(xfm, msk, order=0) nt_moved_mask.set_data_dtype(msk.get_data_dtype()) diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) @@ -189,7 +189,7 @@ def test_displacements_field1( assert exit_code == 0 sw_moved = nb.load("resampled.nii.gz") - nt_moved = xfm.apply(nii, order=0) + nt_moved = apply(xfm, nii, order=0) nt_moved.set_data_dtype(nii.get_data_dtype()) nt_moved.to_filename("nt_resampled.nii.gz") sw_moved.set_data_dtype(nt_moved.get_data_dtype()) @@ -229,7 +229,7 @@ def test_displacements_field2(tmp_path, testdata_path, sw_tool): assert exit_code == 0 sw_moved = nb.load("resampled.nii.gz") - nt_moved = xfm.apply(img_fname, order=0) + nt_moved = apply(xfm, img_fname, order=0) nt_moved.to_filename("nt_resampled.nii.gz") sw_moved.set_data_dtype(nt_moved.get_data_dtype()) diff = np.asanyarray( From 9c8beb759a9d937fb8d10da8641d88d87c1a5598 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 20 May 2024 11:01:01 +0200 Subject: [PATCH 064/123] maint: update certificates to preempt FS download failure --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile 
b/Dockerfile index 57c526b7..aaac0565 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,6 +28,8 @@ RUN apt-get update && \ unzip && \ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +RUN update-ca-certificates -f + # FreeSurfer 7.3.2 FROM downloader as freesurfer COPY docker/files/freesurfer7.3.2-exclude.txt /usr/local/etc/freesurfer7.3.2-exclude.txt From f10e50eddf8ca3a9ff0c35a4e21ebf0348e8cddb Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 20 May 2024 11:01:01 +0200 Subject: [PATCH 065/123] maint: add certificate to preempt FS download failure --- Dockerfile | 4 +++- docker/files/fs-cert.pem | 44 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 docker/files/fs-cert.pem diff --git a/Dockerfile b/Dockerfile index aaac0565..28fe8c31 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,7 +33,9 @@ RUN update-ca-certificates -f # FreeSurfer 7.3.2 FROM downloader as freesurfer COPY docker/files/freesurfer7.3.2-exclude.txt /usr/local/etc/freesurfer7.3.2-exclude.txt -RUN curl -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-ubuntu22_amd64-7.3.2.tar.gz \ +COPY docker/files/fs-cert.pem /usr/local/etc/fs-cert.pem +RUN curl --cacert /usr/local/etc/fs-cert.pem \ + -sSL https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-ubuntu22_amd64-7.3.2.tar.gz \ | tar zxv --no-same-owner -C /opt --exclude-from=/usr/local/etc/freesurfer7.3.2-exclude.txt # AFNI diff --git a/docker/files/fs-cert.pem b/docker/files/fs-cert.pem new file mode 100644 index 00000000..34d53c7b --- /dev/null +++ b/docker/files/fs-cert.pem @@ -0,0 +1,44 @@ +-----BEGIN CERTIFICATE----- +MIIHuDCCBiCgAwIBAgIRAMa1FS9MSn5TXKMgD8OXtoswDQYJKoZIhvcNAQEMBQAw +RDELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUludGVybmV0MjEhMB8GA1UEAxMYSW5D +b21tb24gUlNBIFNlcnZlciBDQSAyMB4XDTI0MDUwOTAwMDAwMFoXDTI1MDUwOTIz +NTk1OVowfTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDU1hc3NhY2h1c2V0dHMxMTAv +BgNVBAoTKFByZXNpZGVudCBhbmQgRmVsbG93cyBvZiBIYXJ2YXJkIENvbGxlZ2Ux +IzAhBgNVBAMTGnN1cmZlci5ubXIubWdoLmhhcnZhcmQuZWR1MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxJfeMKn0NjFkmduegvgRICrm+hn4TaZITeVl +uM/af+g05RUtKRKNIR0CC9mzPDYiW10VNj7TuDyS1DNoe/Jr1Or8lrMCm81NHjnY +aKvtC61O9GWvvDfWeb35vkHfkbd60AgBaLGZIEglENl122bBqpSdO8JglVTDgvFd +mWkuBnQzE/qKt7j88Xjafjhzbnv9Uf1hh8NtbiOaAf53/b5FZuUary64k5twPwpm +Y/pWw3CQhIWUhvRMwcQNvG24lDOssOXSEgb9Gd96ikU/yE4MDnuDBb6tf+2crVQ5 +PF4V2YvbQZ2x8Kf8hygWk3C555ZSCR3LgRl/Paxp9DZUrxfjtwIDAQABo4ID6jCC +A+YwHwYDVR0jBBgwFoAU70wAkqb7di5eleLJX4cbGdVN4tkwHQYDVR0OBBYEFG0I +bSHaYbRPsftHU7uJ5A7Z9UBfMA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8EAjAA +MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBJBgNVHSAEQjBAMDQGCysG +AQQBsjEBAgJnMCUwIwYIKwYBBQUHAgEWF2h0dHBzOi8vc2VjdGlnby5jb20vQ1BT +MAgGBmeBDAECAjBABgNVHR8EOTA3MDWgM6Axhi9odHRwOi8vY3JsLnNlY3RpZ28u +Y29tL0luQ29tbW9uUlNBU2VydmVyQ0EyLmNybDBwBggrBgEFBQcBAQRkMGIwOwYI +KwYBBQUHMAKGL2h0dHA6Ly9jcnQuc2VjdGlnby5jb20vSW5Db21tb25SU0FTZXJ2 +ZXJDQTIuY3J0MCMGCCsGAQUFBzABhhdodHRwOi8vb2NzcC5zZWN0aWdvLmNvbTCC +AX8GCisGAQQB1nkCBAIEggFvBIIBawFpAHUAzxFW7tUufK/zh1vZaS6b6RpxZ0qw +F+ysAdJbd87MOwgAAAGPXXtaqAAABAMARjBEAiARbv8hz3utGkTar2Y3jNnWOGKG +aajAYuB3f30g5Bnb+AIgAmwaltfGcp2uNYmTMU2eSC5AVhpnbisDS2KcFyC7ok4A +dwCi4wrkRe+9rZt+OO1HZ3dT14JbhJTXK14bLMS5UKRH5wAAAY9de1pQAAAEAwBI +MEYCIQDrM8C7Y6GdEKRmGQ1AUmbUArbpImpEXutI8E+KVOUsogIhAKbl+QYqJIUB +rRHpRkKZlefPyZQRo6JnRNz/J1KEuqsTAHcATnWjJ1yaEMM4W2zU3z9S6x3w4I4b +jWnAsfpksWKaOd8AAAGPXXtaRgAABAMASDBGAiEApB4qFWHZLGtPNkUK+6jFqsEk +vmy3bv3cuODXSG3CvfMCIQCCQPR/3HcrSGfmeJsFjWvwLbJFqe6GbRWCvjaUaldI 
+WDCB5AYDVR0RBIHcMIHZghpzdXJmZXIubm1yLm1naC5oYXJ2YXJkLmVkdYIUZm9y +dW0uZnJlZXN1cmZlci5uZXSCGWZvcnVtLm5tci5tZ2guaGFydmFyZC5lZHWCDmZy +ZWVzdXJmZXIubmV0ghdmdHAubm1yLm1naC5oYXJ2YXJkLmVkdYIZcnN5bmMubm1y +Lm1naC5oYXJ2YXJkLmVkdYIWc3VwcG9ydC5mcmVlc3VyZmVyLm5ldIIad2ViZGV2 +Lm5tci5tZ2guaGFydmFyZC5lZHWCEnd3dy5mcmVlc3VyZmVyLm5ldDANBgkqhkiG +9w0BAQwFAAOCAYEAB5nGih504XqNbZJjz1mK43IAiFs0YjocdDWOqqbXMirpbpNd +cPleB0iJkXJnzfzkZFyLdvFGos9FuPRDGmXSh0sdWpm2uQdkXlOp+/e4vMDg8Nrl +YkjshuU4fmUswnzsQ1aj/ome1DG3rmp3umjKpV6ewnVLhgjQ5zwCHfLLsNyzowqn +I6qAa2uzk7bS3XuYu4bLhVfD3X0Ybe4V3FKODBZRAIIU8hgtCz6zw5WtxzdEm5Qp +FHdN8OKazXvrJbzHB1WVk7buIn+8n2HoO202wGaFyyQFVqM2ug0FgCW8AaB+XRCq +BV+nZND2AIALG1HcIIL+pZwxS1K/jBkjUJRb3GDVWw7yzxuvlmawLhk8xzrgTsvp +QXaR+CbnTBx1PeB4nf+yHg2VBvKk6m7E9lnyymENmbeaLC67CJQgr+ne1rpOuEIs +fVbKUP437fyEvPuZCZ+3gxFsKgOerk2J95+AdLKB01pQFh/ITS/2zHHeMeD118sR +mUO+RXiPX5ZUqu/M +-----END CERTIFICATE----- From 7be1b57aff1f1265cf4ddbee0f2c1456e5288518 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Tue, 21 May 2024 09:46:44 -0400 Subject: [PATCH 066/123] fix flake8 --- nitransforms/base.py | 3 ++ nitransforms/surface.py | 81 ++++++++++++++++++++++++++++------------- 2 files changed, 59 insertions(+), 25 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index a75390de..4155dc26 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -88,10 +88,12 @@ def shape(self): """Access the space's size of each dimension.""" return self._shape + class SurfaceMesh(SampledSpatialData): """Class to represent surface meshes.""" __slots__ = ["_triangles"] + def __init__(self, dataset): """Create a sampling reference.""" self._shape = None @@ -133,6 +135,7 @@ def from_arrays(cls, coordinates, triangles): gii = nb.gifti.GiftiImage(darrays=darrays) return cls(gii) + class ImageGrid(SampledSpatialData): """Class to represent spaces of gridded data (images).""" diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 1f6a52b9..9bb409c7 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -23,6 +23,7 @@ class SurfaceTransformBase(): """Generic surface transformation class""" __slots__ = ("_reference", "_moving") + def __init__(self, reference, moving): """Instantiate a generic surface transform.""" self._reference = reference @@ -30,13 +31,14 @@ def __init__(self, reference, moving): def __eq__(self, other): ref_coords_eq = (self.reference._coords == other.reference._coords).all() - ref_tris_eq = (self.reference._triangles == other.reference._triangles).all() + ref_tris_eq = (self.reference._triangles == other.reference._triangles).all() mov_coords_eq = (self.moving._coords == other.moving._coords).all() mov_tris_eq = (self.moving._triangles == other.moving._triangles).all() return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq def __invert__(self): return self.__class__(self.moving, self.reference) + @property def reference(self): return self._reference @@ -52,22 +54,28 @@ def moving(self): @moving.setter def moving(self, surface): self._moving = SurfaceMesh(surface) + @classmethod def from_filename(cls, reference_path, moving_path): - """Create an Surface Index Transformation from a pair of surfaces with corresponding vertices.""" + """Create an Surface Index Transformation from a pair of surfaces with corresponding + vertices.""" reference = SurfaceMesh(nb.load(reference_path)) moving = SurfaceMesh(nb.load(moving_path)) return cls(reference, moving) + class SurfaceCoordinateTransform(SurfaceTransformBase): - """Represents surface transformations in which 
the indices correspond and the coordinates differ.""" + """Represents surface transformations in which the indices correspond and the coordinates + differ.""" __slots__ = ("_reference", "_moving") + def __init__(self, reference, moving): """Instantiate a transform between two surfaces with corresponding vertices.""" super().__init__(reference=reference, moving=moving) if (self._reference._triangles != self._moving._triangles).all(): - raise ValueError("Both surfaces for an index transform must have corresponding vertices.") + raise ValueError("Both surfaces for an index transform must have corresponding" + " vertices.") def map(self, x, inverse=False): if inverse: @@ -80,7 +88,8 @@ def map(self, x, inverse=False): s_tree = KDTree(source._coords) dists, matches = s_tree.query(x) if not np.allclose(dists, 0): - raise NotImplementedError("Mapping on surfaces not implemented for coordinates that aren't vertices") + raise NotImplementedError("Mapping on surfaces not implemented for coordinates that" + " aren't vertices") return dest._coords[matches] def __add__(self, other): @@ -105,8 +114,10 @@ def moving(self): def moving(self, surface): self._moving = SurfaceMesh(surface) + class SurfaceResampler(SurfaceTransformBase): - """Represents transformations in which the coordinate space remains the same and the indicies change.""" + """Represents transformations in which the coordinate space remains the same and the indicies + change.""" __slots__ = ("mat", 'interpolation_method') @@ -134,15 +145,14 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # that it only has to be calculated once and will always be saved with the # transform if mat is None: - r_tree = KDTree(self.reference._coords) m_tree = KDTree(self.moving._coords) kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) # invert the triangles to generate a lookup table from vertices to triangle index - tri_lut = dict() + tri_lut = {} for i, idxs in enumerate(self.moving._triangles): for x in idxs: - if not x in tri_lut: + if x not in tri_lut: tri_lut[x] = [i] else: tri_lut[x].append(i) @@ -150,7 +160,7 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # calculate the barycentric interpolation weights bc_weights = [] enclosing = [] - for sidx, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + for _, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): close_tris = _find_close_tris(kmrv, tri_lut, self.moving) ww, ee = _find_weights(point, close_tris, m_tree) bc_weights.append(ww) @@ -158,12 +168,14 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # build sparse matrix # commenting out code for barycentric nearest neighbor - #bary_nearest = [] + # bary_nearest = [] mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) for s_ix, dd in enumerate(bc_weights): for k, v in dd.items(): mat[s_ix, k] = v - # bary_nearest.append(np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()]) + # bary_nearest.append( + # np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()] + # ) # bary_nearest = np.array(bary_nearest) # transpose so that number of out vertices is columns self.mat = sparse.csr_array(mat.T) @@ -178,14 +190,22 @@ def map(self, x, inverse=False): def __add__(self, other): - if isinstance(other, SurfaceResampler) and (other.interpolation_method == self.interpolation_method): - return self.__class__(self.reference, other.moving, 
interpolation_method=self.interpolation_method) + if (isinstance(other, SurfaceResampler) + and (other.interpolation_method == self.interpolation_method)): + return self.__class__( + self.reference, + other.moving, + interpolation_method=self.interpolation_method + ) else: raise NotImplementedError - def __invert__(self): - return self.__class__(self.moving, self.reference, interpolation_method=self.interpolation_method) + return self.__class__( + self.moving, + self.reference, + interpolation_method=self.interpolation_method + ) def apply(self, x, inverse=False, normalize="element"): """Apply the transform to surface data. @@ -262,14 +282,16 @@ def to_filename(self, filename, fmt=None): return filename @classmethod - def from_filename(cls, filename=None, reference_file=None, moving_file=None, fmt=None, interpolation_method=None): + def from_filename(cls, filename=None, reference_file=None, moving_file=None, + fmt=None, interpolation_method=None): """Load transform from file.""" if filename is None: if reference_file is None or moving_file is None: - raise ValueError("You must pass either a X5 file or a pair of reference and moving surfaces.") + raise ValueError("You must pass either a X5 file or a pair of reference and moving" + " surfaces.") else: if interpolation_method is None: - interpolation_method='barycentric' + interpolation_method = 'barycentric' return cls(SurfaceMesh(nb.load(reference_file)), SurfaceMesh(nb.load(moving_file)), interpolation_method=interpolation_method) @@ -279,7 +301,7 @@ def from_filename(cls, filename=None, reference_file=None, moving_file=None, fmt if fmt == "npz": raise NotImplementedError - #return cls(sparse.load_npz(filename)) + # return cls(sparse.load_npz(filename)) if fmt != "X5": raise ValueError("Only npz and X5 formats are supported.") @@ -291,8 +313,14 @@ def from_filename(cls, filename=None, reference_file=None, moving_file=None, fmt (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), shape=xform["mat_shape"][()], ) - reference = SurfaceMesh.from_arrays(xform['reference_coordinates'], xform['reference_triangles']) - moving = SurfaceMesh.from_arrays(xform['moving_coordinates'], xform['moving_triangles']) + reference = SurfaceMesh.from_arrays( + xform['reference_coordinates'], + xform['reference_triangles'] + ) + moving = SurfaceMesh.from_arrays( + xform['moving_coordinates'], + xform['moving_triangles'] + ) interpolation_method = xform.attrs['interpolation_method'] return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) @@ -448,6 +476,9 @@ def _find_weights(point, close_tris, d_tree): e1 = closest_tri[1] - a e2 = closest_tri[2] - a vecs = np.vstack([a, e1, e2, np.cross(e1, e2), np.cross(e2, a), np.cross(a, e1)]) - res = dict() - res[ct_idxs[0]], res[ct_idxs[1]], res[ct_idxs[2]], _ = _barycentric_weights(vecs, point.squeeze()) - return res, enclosing \ No newline at end of file + res = {} + res[ct_idxs[0]], res[ct_idxs[1]], res[ct_idxs[2]], _ = _barycentric_weights( + vecs, + point.squeeze() + ) + return res, enclosing From b20a4bc600dc893b17e8eced30653ee2fd9d553a Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Tue, 21 May 2024 09:47:33 -0400 Subject: [PATCH 067/123] add .DS_Store --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 8681c41b..87f6099b 100644 --- a/.gitignore +++ b/.gitignore @@ -82,3 +82,5 @@ local_settings.py *.swp .vscode/ + +.DS_Store From 46aca57f3ca0db7035edd46aa26076a7977edeb0 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: 
Tue, 21 May 2024 10:09:45 -0400 Subject: [PATCH 068/123] DOC: add documentation for suface coordinate transform --- nitransforms/surface.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 9bb409c7..7dca6291 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -66,12 +66,22 @@ def from_filename(cls, reference_path, moving_path): class SurfaceCoordinateTransform(SurfaceTransformBase): """Represents surface transformations in which the indices correspond and the coordinates - differ.""" + differ. This could be two surfaces representing difference structures from the same + hemisphere, like white matter and pial, or it could be a sphere and a deformed sphere that + moves those coordinates to a different location.""" __slots__ = ("_reference", "_moving") def __init__(self, reference, moving): - """Instantiate a transform between two surfaces with corresponding vertices.""" + """Instantiate a transform between two surfaces with corresponding vertices. + Parameters + ---------- + reference: surface + Surface with the destination coordinates for each index. + moving: surface + Surface with the starting coordinates for each index. + """ + super().__init__(reference=reference, moving=moving) if (self._reference._triangles != self._moving._triangles).all(): raise ValueError("Both surfaces for an index transform must have corresponding" @@ -122,7 +132,7 @@ class SurfaceResampler(SurfaceTransformBase): __slots__ = ("mat", 'interpolation_method') def __init__(self, reference, moving, interpolation_method='barycentric', mat=None): - """Initialize the transform. + """Initialize the resampling. Parameters ---------- From 6daaef10f86a9af55e239a65207383b1301a6d41 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Tue, 21 May 2024 14:54:31 -0400 Subject: [PATCH 069/123] TEST: expand test coverage --- nitransforms/base.py | 9 +++ nitransforms/surface.py | 99 +++++++++++++++--------------- nitransforms/tests/test_base.py | 8 +++ nitransforms/tests/test_surface.py | 88 +++++++++++++++++++++++++- 4 files changed, 150 insertions(+), 54 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 4155dc26..6e314834 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -98,6 +98,14 @@ def __init__(self, dataset): """Create a sampling reference.""" self._shape = None + if isinstance(dataset, SurfaceMesh): + self._coords = dataset._coords + self._triangles = dataset._triangles + self._ndim = dataset._ndim + self._npoints = dataset._npoints + self._shape = dataset._shape + return + if isinstance(dataset, (str, Path)): dataset = _nbload(str(dataset)) @@ -111,6 +119,7 @@ def __init__(self, dataset): _tris = dataset.get_arrays_from_intent(INTENT_CODES["triangle"]) self._triangles = np.vstack([da.data for da in _tris]) self._npoints, self._ndim = self._coords.shape + self._shape = self._coords.shape return if isinstance(dataset, Cifti2Image): diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 7dca6291..e802da99 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -37,7 +37,7 @@ def __eq__(self, other): return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq def __invert__(self): - return self.__class__(self.moving, self.reference) + return self.__class__(self._moving, self._reference) @property def reference(self): @@ -108,22 +108,6 @@ def __add__(self, other): else: raise NotImplementedError - @property - def reference(self): - return 
self._reference - - @reference.setter - def reference(self, surface): - self._reference = SurfaceMesh(surface) - - @property - def moving(self): - return self._moving - - @moving.setter - def moving(self, surface): - self._moving = SurfaceMesh(surface) - class SurfaceResampler(SurfaceTransformBase): """Represents transformations in which the coordinate space remains the same and the indicies @@ -155,46 +139,49 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # that it only has to be calculated once and will always be saved with the # transform if mat is None: - m_tree = KDTree(self.moving._coords) - kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) - - # invert the triangles to generate a lookup table from vertices to triangle index - tri_lut = {} - for i, idxs in enumerate(self.moving._triangles): - for x in idxs: - if x not in tri_lut: - tri_lut[x] = [i] - else: - tri_lut[x].append(i) - - # calculate the barycentric interpolation weights - bc_weights = [] - enclosing = [] - for _, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): - close_tris = _find_close_tris(kmrv, tri_lut, self.moving) - ww, ee = _find_weights(point, close_tris, m_tree) - bc_weights.append(ww) - enclosing.append(ee) - - # build sparse matrix - # commenting out code for barycentric nearest neighbor - # bary_nearest = [] - mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) - for s_ix, dd in enumerate(bc_weights): - for k, v in dd.items(): - mat[s_ix, k] = v - # bary_nearest.append( - # np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()] - # ) - # bary_nearest = np.array(bary_nearest) - # transpose so that number of out vertices is columns - self.mat = sparse.csr_array(mat.T) + self.__calculate_mat() else: if isinstance(mat, sparse.csr_array): self.mat = mat else: self.mat = sparse.csr_array(mat) + def __calculate_mat(self): + m_tree = KDTree(self.moving._coords) + kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) + + # invert the triangles to generate a lookup table from vertices to triangle index + tri_lut = {} + for i, idxs in enumerate(self.moving._triangles): + for x in idxs: + if x not in tri_lut: + tri_lut[x] = [i] + else: + tri_lut[x].append(i) + + # calculate the barycentric interpolation weights + bc_weights = [] + enclosing = [] + for _, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + close_tris = _find_close_tris(kmrv, tri_lut, self.moving) + ww, ee = _find_weights(point, close_tris, m_tree) + bc_weights.append(ww) + enclosing.append(ee) + + # build sparse matrix + # commenting out code for barycentric nearest neighbor + # bary_nearest = [] + mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) + for s_ix, dd in enumerate(bc_weights): + for k, v in dd.items(): + mat[s_ix, k] = v + # bary_nearest.append( + # np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()] + # ) + # bary_nearest = np.array(bary_nearest) + # transpose so that number of out vertices is columns + self.mat = sparse.csr_array(mat.T) + def map(self, x, inverse=False): return x @@ -217,6 +204,16 @@ def __invert__(self): interpolation_method=self.interpolation_method ) + @SurfaceTransformBase.reference.setter + def reference(self, surface): + raise ValueError("Don't modify the reference of an existing resampling." 
+ "Create a new one instead.") + + @SurfaceTransformBase.moving.setter + def moving(self, surface): + raise ValueError("Don't modify the moving of an existing resampling." + "Create a new one instead.") + def apply(self, x, inverse=False, normalize="element"): """Apply the transform to surface data. diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 6d640050..c81b379a 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -175,6 +175,14 @@ def test_SurfaceMesh(testdata_path): assert mesh._npoints == exp_coords_shape[0] assert mesh._ndim == exp_coords_shape[1] + mfd = SurfaceMesh(surf_path) + assert (mfd._coords == mesh._coords).all() + assert (mfd._triangles == mesh._triangles).all() + + mfsm = SurfaceMesh(mfd) + assert (mfd._coords == mfsm._coords).all() + assert (mfd._triangles == mfsm._triangles).all() + with pytest.raises(ValueError): SurfaceMesh(nb.load(img_path)) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 32f8d4a1..76ff2b32 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -4,8 +4,13 @@ import numpy as np import nibabel as nb from nitransforms.base import SurfaceMesh -from nitransforms.surface import SurfaceCoordinateTransform, SurfaceResampler - +from nitransforms.surface import ( + SurfaceTransformBase, + SurfaceCoordinateTransform, + SurfaceResampler +) +import pytest +from scipy import sparse # def test_surface_transform_npz(): # mat = sparse.random(10, 10, density=0.5) @@ -37,6 +42,69 @@ # y_none = xfm.apply(x, normalize="none") # assert y_none.sum() != y_element.sum() # assert y_none.sum() != y_sum.sum() +def test_SurfaceTransformBase(testdata_path): + # note these transformations are a bit of a weird use of surface transformation, but I'm + # just testing the base class and the io + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + pial = SurfaceMesh(nb.load(pial_path)) + stfb = SurfaceTransformBase(sphere_reg, pial) + + # test loading from filenames + stfb_ff = SurfaceTransformBase.from_filename(sphere_reg_path, pial_path) + assert stfb_ff == stfb + + # test inversion and setting + stfb_i = stfb.__invert__() + stfb.reference = pial + stfb.moving = sphere_reg + assert (stfb_i._reference._coords == stfb._reference._coords).all() + assert (stfb_i._reference._triangles == stfb._reference._triangles).all() + assert (stfb_i._moving._coords == stfb._moving._coords).all() + assert (stfb_i._moving._triangles == stfb._moving._triangles).all() + # test equality + assert stfb_i == stfb + + +def test_SurfaceCoordinateTransform(testdata_path): + # note these transformations are a bit of a weird use of surface transformation, but I'm + # just testing the class and the io + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + + sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) + pial = SurfaceMesh(nb.load(pial_path)) + fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) + + # test mesh correspondence test + with pytest.raises(ValueError): + sct = SurfaceCoordinateTransform(fslr_sphere, pial) + + 
# test loading from filenames + sct = SurfaceCoordinateTransform(sphere_reg, pial) + sctf = SurfaceCoordinateTransform.from_filename(sphere_reg_path, pial_path) + assert sct == sctf + + # test mapping + assert (sct.map(sct.moving._coords[:100]) == sct.reference._coords[:100]).all() + assert (sct.map(sct.reference._coords[:100], inverse=True) == sct.moving._coords[:100]).all() + with pytest.raises(NotImplementedError): + sct.map(sct.reference._coords[0]) + + # test inversion and addition + scti = sct.__invert__() + + assert scti + sct == SurfaceCoordinateTransform(pial, pial) + assert sct + scti == SurfaceCoordinateTransform(sphere_reg, sphere_reg) + + sct.reference = pial + sct.moving = sphere_reg + assert (scti.reference._coords == sct.reference._coords).all() + assert (scti.reference._triangles == sct.reference._triangles).all() + assert scti == sct def test_SurfaceResampler(testdata_path): @@ -52,7 +120,7 @@ def test_SurfaceResampler(testdata_path): reference = fslr_sphere moving = sphere_reg - # compare results to what connecome workbench produces + # compare results to what connectome workbench produces resampling = SurfaceResampler(reference, moving) resampled_thickness = resampling.apply(subj_thickness.agg_data(), normalize='element') ref_resampled = nb.load(ref_resampled_thickness_path).agg_data() @@ -60,6 +128,11 @@ def test_SurfaceResampler(testdata_path): max_dif = np.abs(resampled_thickness.astype(np.float32) - ref_resampled).max() assert max_dif < dif_tol + with pytest.raises(ValueError): + resampling.reference = reference + with pytest.raises(ValueError): + resampling.moving = moving + # test file io fn = tempfile.mktemp(suffix=".h5") try: @@ -74,11 +147,20 @@ def test_SurfaceResampler(testdata_path): resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness2 == resampled_thickness) + except Exception: os.remove(fn) raise os.remove(fn) + # test loading with a csr + assert isinstance(resampling.mat, sparse.csr_array) + resampling2a = SurfaceResampler(reference, moving, resampling.mat) + resampled_thickness2a = resampling2a.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness2a == resampled_thickness) + + assert np.all(resampling.map(np.array([[0, 0, 0]])) == np.array([[0, 0, 0]])) + # test loading from surfaces resampling3 = SurfaceResampler.from_filename(reference_file=fslr_sphere_path, moving_file=sphere_reg_path) From 249664751c07fdaa731ffd3532ca7bba7c5b9733 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Tue, 21 May 2024 15:17:57 -0400 Subject: [PATCH 070/123] NF: Validate inputs to SurfaceResampler --- nitransforms/surface.py | 13 +++++++++++++ nitransforms/tests/test_surface.py | 8 +++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index e802da99..fd991306 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -131,6 +131,8 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No Only barycentric is currently implemented """ super().__init__(reference, moving) + if interpolation_method not in ['barycentric']: + raise NotImplementedError(f"{interpolation_method} is not implemented.") self.interpolation_method = interpolation_method # TODO: should we deal with the case where reference and moving are the same? 
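A minimal usage sketch of the SurfaceCoordinateTransform API exercised by the PATCH 069 tests above; the GIFTI file names below are placeholders, and the mapping direction mirrors the assertions at this point in the series (PATCH 082 later reverses the default direction of map()):

    import nibabel as nb
    from nitransforms.base import SurfaceMesh
    from nitransforms.surface import SurfaceCoordinateTransform

    # Placeholder paths: any two surfaces of the same subject and hemisphere that
    # share a triangulation (e.g., a registered sphere and a pial surface).
    sphere_reg = SurfaceMesh(nb.load("sub-01_hemi-R_desc-reg_sphere.surf.gii"))
    pial = SurfaceMesh(nb.load("sub-01_hemi-R_pial.surf.gii"))

    xfm = SurfaceCoordinateTransform(sphere_reg, pial)
    new_coords = xfm.map(xfm.moving._coords)                   # -> reference coordinates
    old_coords = xfm.map(xfm.reference._coords, inverse=True)  # -> moving coordinates
    inverse_xfm = ~xfm                                         # swaps reference and moving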
@@ -145,6 +147,17 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No self.mat = mat else: self.mat = sparse.csr_array(mat) + # validate shape of the provided matrix + if (mat.shape[0] != moving._npoints) or (mat.shape[1] != reference._npoints): + msg = "Shape of provided mat does not match expectations based on " \ + "dimensions of moving and reference. \n" + if (mat.shape[0] != moving._npoints): + msg += f" mat has {mat.shape[0]} rows but moving has {moving._npoints} " \ + f"vertices. \n" + if (mat.shape[1] != reference._npoints): + msg += f" mat has {mat.shape[1]} columns but reference has" \ + f" {reference._npoints} vertices." + raise ValueError(msg) def __calculate_mat(self): m_tree = KDTree(self.moving._coords) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 76ff2b32..98119922 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -132,6 +132,8 @@ def test_SurfaceResampler(testdata_path): resampling.reference = reference with pytest.raises(ValueError): resampling.moving = moving + with pytest.raises(NotImplementedError): + _ = SurfaceResampler(reference, moving, "foo") # test file io fn = tempfile.mktemp(suffix=".h5") @@ -155,10 +157,14 @@ def test_SurfaceResampler(testdata_path): # test loading with a csr assert isinstance(resampling.mat, sparse.csr_array) - resampling2a = SurfaceResampler(reference, moving, resampling.mat) + resampling2a = SurfaceResampler(reference, moving, mat=resampling.mat) resampled_thickness2a = resampling2a.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness2a == resampled_thickness) + with pytest.raises(ValueError): + rsfail = SurfaceResampler(moving, reference, mat=resampling.mat) + + # test map assert np.all(resampling.map(np.array([[0, 0, 0]])) == np.array([[0, 0, 0]])) # test loading from surfaces From e98487cfdbf7b7ccad3f9dcc5cadc883e867db63 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 30 May 2024 11:25:51 -0400 Subject: [PATCH 071/123] ENH: Restore apply method, warning of deprecation and calling function --- nitransforms/base.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/nitransforms/base.py b/nitransforms/base.py index 26c0d475..ac6e7520 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -259,6 +259,19 @@ def _to_hdf5(self, x5_root): """Serialize this object into the x5 file format.""" raise NotImplementedError + def apply(self, *args, **kwargs): + """Apply the transform to a dataset. + + Deprecated. Please use ``nitransforms.resampling.apply`` instead. + """ + message = ( + "The `apply` method is deprecated. Please use `nitransforms.resampling.apply` instead." 
+ ) + warnings.warn(message, DeprecationWarning, level=2) + from .resampling import apply + + return apply(self, *args, **kwargs) + def _as_homogeneous(xyz, dtype="float32", dim=3): """ From f7784bcacb81aa6b3d3a4487dd7afefb4b7e7ad3 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Thu, 20 Jun 2024 16:40:05 +0900 Subject: [PATCH 072/123] ADD: project-unproject functionality --- nitransforms/base.py | 13 ++++++++++ nitransforms/surface.py | 41 +++++++++++++++++++++++++----- nitransforms/tests/test_surface.py | 26 ++++++++++++++++--- setup.cfg | 1 + 4 files changed, 70 insertions(+), 11 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 6e314834..a765befd 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -127,6 +127,19 @@ def __init__(self, dataset): raise ValueError("Dataset could not be interpreted as an irregular sample.") + def check_sphere(self, tolerance=1.001): + """Check sphericity of surface. + Based on https://github.com/Washington-University/workbench/blob/7ba3345d161d567a4b628ceb02ab4471fc96cb20/src/Files/SurfaceResamplingHelper.cxx#L503 + """ + dists = np.linalg.norm(self._coords, axis=1) + return (dists.min() * tolerance) > dists.max() + + def set_radius(self, radius=100): + if not self.check_sphere(): + raise ValueError("You should only set the radius on spherical surfaces.") + dists = np.linalg.norm(self._coords, axis=1) + self._coords = self._coords * (radius / dists).reshape((-1, 1)) + @classmethod def from_arrays(cls, coordinates, triangles): darrays = [ diff --git a/nitransforms/surface.py b/nitransforms/surface.py index fd991306..7910cb8d 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -7,7 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Surface transforms.""" - +import pathlib import warnings import h5py import numpy as np @@ -24,8 +24,15 @@ class SurfaceTransformBase(): """Generic surface transformation class""" __slots__ = ("_reference", "_moving") - def __init__(self, reference, moving): + def __init__(self, reference, moving, spherical=False): """Instantiate a generic surface transform.""" + if spherical: + if not reference.check_sphere(): + raise ValueError("reference was not spherical") + if not moving.check_sphere(): + raise ValueError("moving was not spherical") + reference.set_radius() + moving.set_radius() self._reference = reference self._moving = moving @@ -111,7 +118,12 @@ def __add__(self, other): class SurfaceResampler(SurfaceTransformBase): """Represents transformations in which the coordinate space remains the same and the indicies - change.""" + change. + To achieve surface project-unproject functionality: + sphere_in as the reference + sphere_project_to as the moving + Then apply the transformation to sphere_unproject_from + """ __slots__ = ("mat", 'interpolation_method') @@ -130,7 +142,10 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No interpolation_method : str Only barycentric is currently implemented """ - super().__init__(reference, moving) + super().__init__(SurfaceMesh(reference), SurfaceMesh(moving), spherical=True) + + self.reference.set_radius() + self.moving.set_radius() if interpolation_method not in ['barycentric']: raise NotImplementedError(f"{interpolation_method} is not implemented.") self.interpolation_method = interpolation_method @@ -232,8 +247,8 @@ def apply(self, x, inverse=False, normalize="element"): Parameters ---------- - x : array-like, shape (..., nv1) - Data to transform. 
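The check_sphere() and set_radius() helpers added to SurfaceMesh above reduce to vertex-norm arithmetic; a self-contained sketch of the same computation on synthetic coordinates (illustration only, not the package API):

    import numpy as np

    rng = np.random.default_rng(0)
    coords = rng.normal(size=(1000, 3))
    coords *= 50.0 / np.linalg.norm(coords, axis=1, keepdims=True)  # points on a radius-50 sphere

    dists = np.linalg.norm(coords, axis=1)
    assert (dists.min() * 1.001) > dists.max()  # the check_sphere() tolerance test

    # set_radius(100): rescale every vertex to sit exactly 100 units from the origin
    coords = coords * (100.0 / dists).reshape((-1, 1))
    assert np.allclose(np.linalg.norm(coords, axis=1), 100.0)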
+ x : array-like, shape (..., nv1), or SurfaceMesh + Data to transform or SurfaceMesh to resample inverse : bool, default=False Whether to apply the inverse transform. If True, ``x`` has shape (..., nv2), and the output will have shape (..., nv1). @@ -266,7 +281,18 @@ def apply(self, x, inverse=False, normalize="element"): scale[mask] = 1.0 / sum_[mask] mat = sparse.diags(scale) @ mat - y = x @ mat + if isinstance(x, SurfaceMesh) or isinstance(x, str) or isinstance(x, pathlib.PurePath): + x = SurfaceMesh(x) + if not x.check_sphere(): + raise ValueError("If x is a surface, it should be a sphere.") + x.set_radius() + rs_x = x._coords[:, 0] @ mat + rs_y = x._coords[:, 1] @ mat + rs_z = x._coords[:, 2] @ mat + y = SurfaceMesh.from_arrays(np.vstack([rs_x, rs_y, rs_z]).T, self.reference._triangles) + y.set_radius() + else: + y = x @ mat return y def _to_hdf5(self, x5_root): @@ -337,6 +363,7 @@ def from_filename(cls, filename=None, reference_file=None, moving_file=None, xform['reference_coordinates'], xform['reference_triangles'] ) + moving = SurfaceMesh.from_arrays( xform['moving_coordinates'], xform['moving_triangles'] diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 98119922..f3744de2 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -113,10 +113,14 @@ def test_SurfaceResampler(testdata_path): fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" ref_resampled_thickness_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" + fslr_fsaverage_sphere_path = testdata_path / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" + subj_fsaverage_sphere_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) subj_thickness = nb.load(shape_path) + subj_fsaverage_sphere = nb.load(subj_fsaverage_sphere_path) reference = fslr_sphere moving = sphere_reg @@ -141,10 +145,10 @@ def test_SurfaceResampler(testdata_path): resampling.to_filename(fn) resampling2 = SurfaceResampler.from_filename(fn) - assert resampling2 == resampling - assert np.all(resampling2.reference._coords == resampling.reference._coords) + #assert resampling2 == resampling + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) assert np.all(resampling2.reference._triangles == resampling.reference._triangles) - assert np.all(resampling2.reference._coords == resampling.reference._coords) + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) assert np.all(resampling2.moving._triangles == resampling.moving._triangles) resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') @@ -176,4 +180,18 @@ def test_SurfaceResampler(testdata_path): assert np.all(resampling3.reference._coords == resampling.reference._coords) assert np.all(resampling3.moving._triangles == resampling.moving._triangles) resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') - assert np.all(resampled_thickness3 == resampled_thickness) \ No newline at end of file + assert np.all(resampled_thickness3 == resampled_thickness) + + # test project-unproject funcitonality + 
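The project-unproject recipe described in the SurfaceResampler docstring (sphere_in as the reference, sphere_project_to as the moving surface, then apply() on sphere_unproject_from) is what the test below exercises; a sketch with placeholder file names:

    from nitransforms.surface import SurfaceResampler

    projunproj = SurfaceResampler(
        "sub-01_hemi-R_desc-reg_sphere.surf.gii",   # sphere_in (reference)
        "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii",  # sphere_project_to (moving)
    )
    # sphere_unproject_from; apply() returns a SurfaceMesh with the reference topology
    resampled = projunproj.apply("tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii")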
projunproj = SurfaceResampler(sphere_reg_path, fslr_sphere_path) + with pytest.raises(ValueError): + projunproj.apply(pial_path) + transformed = projunproj.apply(fslr_fsaverage_sphere_path) + projunproj_ref = nb.load(subj_fsaverage_sphere_path) + assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 + assert (transformed._triangles == projunproj_ref.agg_data()[1]).all() + + with pytest.raises(ValueError): + SurfaceResampler(sphere_reg_path, pial_path) + with pytest.raises(ValueError): + SurfaceResampler(pial_path, sphere_reg_path) diff --git a/setup.cfg b/setup.cfg index 93499c35..70f29991 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,6 +27,7 @@ install_requires = scipy >= 1.6.0 nibabel >= 3.0 h5py + pathlib test_requires = pytest pytest-cov From 2e259cdb7bf44eedae45b80f79194be119d88a45 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 20 Jun 2024 17:34:45 +0900 Subject: [PATCH 073/123] Update env.yml --- env.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/env.yml b/env.yml index 721507ae..55ff2114 100644 --- a/env.yml +++ b/env.yml @@ -12,6 +12,8 @@ dependencies: - mkl-service=2.4.0 # git-annex for templateflow users with DataLad superdatasets - git-annex=*=alldep* + # ANTs is linked against libitk 5.3 but does not pin the version + - libitk=5.3 # Base scientific python stack; required by FSL, so pinned here - numpy=1.26 - scipy=1.11 @@ -39,4 +41,4 @@ dependencies: # - pip: # - -r requirements.txt variables: - FSLOUTPUTTYPE: NIFTI_GZ \ No newline at end of file + FSLOUTPUTTYPE: NIFTI_GZ From 66001b17b400493e178f73183c161b6435c44444 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 20 Jun 2024 17:38:47 +0900 Subject: [PATCH 074/123] Update setup.cfg --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index 93499c35..20fe531e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -30,6 +30,7 @@ install_requires = test_requires = pytest pytest-cov + pytest-env nose codecov setup_requires = @@ -46,6 +47,7 @@ niftiexts = test = pytest pytest-cov + pytest-env codecov tests = %(test)s From d5705fb7b121dd1e7ce7b3e4187b687a891b9292 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Thu, 20 Jun 2024 17:42:54 +0900 Subject: [PATCH 075/123] clean surface test --- nitransforms/tests/test_surface.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index f3744de2..7795cb33 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -120,7 +120,6 @@ def test_SurfaceResampler(testdata_path): fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) subj_thickness = nb.load(shape_path) - subj_fsaverage_sphere = nb.load(subj_fsaverage_sphere_path) reference = fslr_sphere moving = sphere_reg From fc0628914b63affd2aee85c5fba9432807d81a4c Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Thu, 20 Jun 2024 18:01:38 +0900 Subject: [PATCH 076/123] FIX: don't drop 3dNwarpApply from the dockerfile --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 28fe8c31..45c52663 100644 --- a/Dockerfile +++ b/Dockerfile @@ -57,7 +57,9 @@ RUN mkdir -p /opt/afni-latest \ -name "3dTshift" -or \ -name "3dUnifize" -or \ -name "3dAutomask" -or \ - -name "3dvolreg" \) -delete + -name "3dvolreg" -or \ + -name "3dNwarpApply" \ + ) -delete # Micromamba FROM downloader as micromamba From 4e95fa3e24e4a42f291c605cf8cee72a3bb14cd5 Mon Sep 17 00:00:00 
2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 09:05:53 +0900 Subject: [PATCH 077/123] Update Dockerfile Co-authored-by: Oscar Esteban --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 45c52663..a1c5f4b2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -59,7 +59,7 @@ RUN mkdir -p /opt/afni-latest \ -name "3dAutomask" -or \ -name "3dvolreg" -or \ -name "3dNwarpApply" \ - ) -delete + \) -delete # Micromamba FROM downloader as micromamba From 884a81e9c230689cea0bff2d98cfef73cc41ef98 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 09:14:07 +0900 Subject: [PATCH 078/123] RF: don't decompose coordinates before transforming --- nitransforms/surface.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 7910cb8d..ac86f94e 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -286,10 +286,9 @@ def apply(self, x, inverse=False, normalize="element"): if not x.check_sphere(): raise ValueError("If x is a surface, it should be a sphere.") x.set_radius() - rs_x = x._coords[:, 0] @ mat - rs_y = x._coords[:, 1] @ mat - rs_z = x._coords[:, 2] @ mat - y = SurfaceMesh.from_arrays(np.vstack([rs_x, rs_y, rs_z]).T, self.reference._triangles) + rs_coords = x._coords.T @ mat + + y = SurfaceMesh.from_arrays(rs_coords.T, self.reference._triangles) y.set_radius() else: y = x @ mat From 288fc7c6342803668ff0b787380db9634abe3688 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 09:14:37 +0900 Subject: [PATCH 079/123] TEST: split out project unproject test --- nitransforms/tests/test_surface.py | 39 +++++++++++++++++------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 7795cb33..25f62739 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -106,21 +106,40 @@ def test_SurfaceCoordinateTransform(testdata_path): assert (scti.reference._triangles == sct.reference._triangles).all() assert scti == sct +def test_ProjectUnproject(testdata_path): + + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + subj_fsaverage_sphere_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" + fslr_fsaverage_sphere_path = testdata_path / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + + # test project-unproject funcitonality + projunproj = SurfaceResampler(sphere_reg_path, fslr_sphere_path) + with pytest.raises(ValueError): + projunproj.apply(pial_path) + transformed = projunproj.apply(fslr_fsaverage_sphere_path) + projunproj_ref = nb.load(subj_fsaverage_sphere_path) + assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 + assert (transformed._triangles == projunproj_ref.agg_data()[1]).all() def test_SurfaceResampler(testdata_path): dif_tol = 0.001 - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" ref_resampled_thickness_path = testdata_path / 
"sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" - fslr_fsaverage_sphere_path = testdata_path / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" - subj_fsaverage_sphere_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) subj_thickness = nb.load(shape_path) + with pytest.raises(ValueError): + SurfaceResampler(sphere_reg_path, pial_path) + with pytest.raises(ValueError): + SurfaceResampler(pial_path, sphere_reg_path) + reference = fslr_sphere moving = sphere_reg # compare results to what connectome workbench produces @@ -180,17 +199,3 @@ def test_SurfaceResampler(testdata_path): assert np.all(resampling3.moving._triangles == resampling.moving._triangles) resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness3 == resampled_thickness) - - # test project-unproject funcitonality - projunproj = SurfaceResampler(sphere_reg_path, fslr_sphere_path) - with pytest.raises(ValueError): - projunproj.apply(pial_path) - transformed = projunproj.apply(fslr_fsaverage_sphere_path) - projunproj_ref = nb.load(subj_fsaverage_sphere_path) - assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 - assert (transformed._triangles == projunproj_ref.agg_data()[1]).all() - - with pytest.raises(ValueError): - SurfaceResampler(sphere_reg_path, pial_path) - with pytest.raises(ValueError): - SurfaceResampler(pial_path, sphere_reg_path) From c3587c1a481b41510ebe23d27b28cd05a9fd1f0f Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 09:40:23 +0900 Subject: [PATCH 080/123] TEST: cover some more error messages --- nitransforms/tests/test_surface.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 25f62739..e82b4a69 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -150,6 +150,10 @@ def test_SurfaceResampler(testdata_path): max_dif = np.abs(resampled_thickness.astype(np.float32) - ref_resampled).max() assert max_dif < dif_tol + with pytest.raises(ValueError): + SurfaceResampler(reference, moving, mat=resampling.mat[:, :10000]) + with pytest.raises(ValueError): + SurfaceResampler(reference, moving, mat=resampling.mat[:10000, :]) with pytest.raises(ValueError): resampling.reference = reference with pytest.raises(ValueError): From 5e8b49216c10a2e267f63b7f5c9d9348e31d6498 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 11:15:09 +0900 Subject: [PATCH 081/123] PL: lint surface --- nitransforms/surface.py | 130 ++++++++++++++--------------- nitransforms/tests/test_surface.py | 62 ++++++-------- 2 files changed, 89 insertions(+), 103 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index ac86f94e..ce8a8069 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -11,18 +11,17 @@ import warnings import h5py import numpy as np -import scipy.sparse as sparse -from nitransforms.base import ( - SurfaceMesh -) import nibabel as nb +from scipy import sparse from scipy.spatial import KDTree from scipy.spatial.distance import cdist +from nitransforms.base 
import ( + SurfaceMesh +) class SurfaceTransformBase(): """Generic surface transformation class""" - __slots__ = ("_reference", "_moving") def __init__(self, reference, moving, spherical=False): """Instantiate a generic surface transform.""" @@ -37,10 +36,10 @@ def __init__(self, reference, moving, spherical=False): self._moving = moving def __eq__(self, other): - ref_coords_eq = (self.reference._coords == other.reference._coords).all() - ref_tris_eq = (self.reference._triangles == other.reference._triangles).all() - mov_coords_eq = (self.moving._coords == other.moving._coords).all() - mov_tris_eq = (self.moving._triangles == other.moving._triangles).all() + ref_coords_eq = np.all(self.reference._coords == other.reference._coords) + ref_tris_eq = np.all(self.reference._triangles == other.reference._triangles) + mov_coords_eq = np.all(self.moving._coords == other.moving._coords) + mov_tris_eq = np.all(self.moving._triangles == other.moving._triangles) return ref_coords_eq & ref_tris_eq & mov_coords_eq & mov_tris_eq def __invert__(self): @@ -90,7 +89,7 @@ def __init__(self, reference, moving): """ super().__init__(reference=reference, moving=moving) - if (self._reference._triangles != self._moving._triangles).all(): + if np.all(self._reference._triangles != self._moving._triangles): raise ValueError("Both surfaces for an index transform must have corresponding" " vertices.") @@ -112,8 +111,7 @@ def map(self, x, inverse=False): def __add__(self, other): if isinstance(other, SurfaceCoordinateTransform): return self.__class__(self.reference, other.moving) - else: - raise NotImplementedError + raise NotImplementedError class SurfaceResampler(SurfaceTransformBase): @@ -125,7 +123,7 @@ class SurfaceResampler(SurfaceTransformBase): Then apply the transformation to sphere_unproject_from """ - __slots__ = ("mat", 'interpolation_method') + __slots__ = ("_reference", "_moving", "mat", 'interpolation_method') def __init__(self, reference, moving, interpolation_method='barycentric', mat=None): """Initialize the resampling. @@ -166,17 +164,17 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No if (mat.shape[0] != moving._npoints) or (mat.shape[1] != reference._npoints): msg = "Shape of provided mat does not match expectations based on " \ "dimensions of moving and reference. \n" - if (mat.shape[0] != moving._npoints): + if mat.shape[0] != moving._npoints: msg += f" mat has {mat.shape[0]} rows but moving has {moving._npoints} " \ f"vertices. \n" - if (mat.shape[1] != reference._npoints): + if mat.shape[1] != reference._npoints: msg += f" mat has {mat.shape[1]} columns but reference has" \ f" {reference._npoints} vertices." 
raise ValueError(msg) def __calculate_mat(self): m_tree = KDTree(self.moving._coords) - kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) + _, kmr_closest = m_tree.query(self.reference._coords, k=10) # invert the triangles to generate a lookup table from vertices to triangle index tri_lut = {} @@ -190,7 +188,7 @@ def __calculate_mat(self): # calculate the barycentric interpolation weights bc_weights = [] enclosing = [] - for _, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + for point, kmrv in zip(self.reference._coords, kmr_closest): close_tris = _find_close_tris(kmrv, tri_lut, self.moving) ww, ee = _find_weights(point, close_tris, m_tree) bc_weights.append(ww) @@ -210,7 +208,7 @@ def __calculate_mat(self): # transpose so that number of out vertices is columns self.mat = sparse.csr_array(mat.T) - def map(self, x, inverse=False): + def map(self, x): return x def __add__(self, other): @@ -222,8 +220,7 @@ def __add__(self, other): other.moving, interpolation_method=self.interpolation_method ) - else: - raise NotImplementedError + raise NotImplementedError def __invert__(self): return self.__class__( @@ -281,7 +278,7 @@ def apply(self, x, inverse=False, normalize="element"): scale[mask] = 1.0 / sum_[mask] mat = sparse.diags(scale) @ mat - if isinstance(x, SurfaceMesh) or isinstance(x, str) or isinstance(x, pathlib.PurePath): + if isinstance(x, (SurfaceMesh, pathlib.PurePath, str)): x = SurfaceMesh(x) if not x.check_sphere(): raise ValueError("If x is a surface, it should be a sphere.") @@ -327,51 +324,50 @@ def to_filename(self, filename, fmt=None): return filename @classmethod - def from_filename(cls, filename=None, reference_file=None, moving_file=None, + def from_filename(cls, filename=None, reference_path=None, moving_path=None, fmt=None, interpolation_method=None): """Load transform from file.""" if filename is None: - if reference_file is None or moving_file is None: + if reference_path is None or moving_path is None: raise ValueError("You must pass either a X5 file or a pair of reference and moving" " surfaces.") - else: - if interpolation_method is None: - interpolation_method = 'barycentric' - return cls(SurfaceMesh(nb.load(reference_file)), - SurfaceMesh(nb.load(moving_file)), - interpolation_method=interpolation_method) - else: - if fmt is None: - fmt = "npz" if filename.endswith(".npz") else "X5" - - if fmt == "npz": - raise NotImplementedError - # return cls(sparse.load_npz(filename)) - - if fmt != "X5": - raise ValueError("Only npz and X5 formats are supported.") - - with h5py.File(filename, "r") as f: - assert f.attrs["Format"] == "X5" - xform = f["/0/Transform"] - mat = sparse.csr_matrix( - (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), - shape=xform["mat_shape"][()], - ) - reference = SurfaceMesh.from_arrays( - xform['reference_coordinates'], - xform['reference_triangles'] - ) - - moving = SurfaceMesh.from_arrays( - xform['moving_coordinates'], - xform['moving_triangles'] - ) - interpolation_method = xform.attrs['interpolation_method'] - return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) - - -def _pointsToTriangles(points, triangles): + if interpolation_method is None: + interpolation_method = 'barycentric' + return cls(SurfaceMesh(nb.load(reference_path)), + SurfaceMesh(nb.load(moving_path)), + interpolation_method=interpolation_method) + + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # return 
cls(sparse.load_npz(filename)) + + if fmt != "X5": + raise ValueError("Only npz and X5 formats are supported.") + + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + mat = sparse.csr_matrix( + (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), + shape=xform["mat_shape"][()], + ) + reference = SurfaceMesh.from_arrays( + xform['reference_coordinates'], + xform['reference_triangles'] + ) + + moving = SurfaceMesh.from_arrays( + xform['moving_coordinates'], + xform['moving_triangles'] + ) + interpolation_method = xform.attrs['interpolation_method'] + return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) + + +def _points_to_triangles(points, triangles): """Implementation that vectorizes project of a point to a set of triangles. from: https://stackoverflow.com/a/32529589 """ @@ -388,7 +384,7 @@ def _pointsToTriangles(points, triangles): # Calculate determinant and denominator det = a * c - b * b - invDet = 1. / det + inv_det = 1. / det denom = a - 2 * b + c # Project to the edges @@ -411,7 +407,7 @@ def _pointsToTriangles(points, triangles): m1 = u < 0 m2 = v < 0 m3 = d < 0 - m4 = (a + d > b + e) + m4 = a + d > b + e m5 = ce > bd t0 = m0 & m1 & m2 & m3 @@ -433,8 +429,8 @@ def _pointsToTriangles(points, triangles): v = np.where(t2, np.clip(ec, 0, 1), v) u = np.where(t3, np.clip(da, 0, 1), u) v = np.where(t3, 0, v) - u *= np.where(t4, invDet, 1) - v *= np.where(t4, invDet, 1) + u *= np.where(t4, inv_det, 1) + v *= np.where(t4, inv_det, 1) u = np.where(t5, np.clip(numer0, 0, 1), u) v = np.where(t5, 1 - u, v) u = np.where(t6, 0, u) @@ -504,7 +500,7 @@ def _find_close_tris(kdsv, tri_lut, surface): def _find_weights(point, close_tris, d_tree): point = point[np.newaxis, :] - tri_dists = cdist(point, _pointsToTriangles(point, close_tris).squeeze()) + tri_dists = cdist(point, _points_to_triangles(point, close_tris).squeeze()) closest_tri = close_tris[(tri_dists == tri_dists.min()).squeeze()] # make sure a single closest triangle was found if closest_tri.shape[0] != 1: @@ -515,7 +511,7 @@ def _find_weights(point, close_tris, d_tree): closest_tri = closest_tri.squeeze() # Make sure point is actually inside triangle enclosing = True - if ((point > closest_tri).sum(0) != 3).all(): + if np.all((point > closest_tri).sum(0) != 3): enclosing = False _, ct_idxs = d_tree.query(closest_tri) a = closest_tri[0] diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index e82b4a69..a6a17a62 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -3,14 +3,14 @@ import numpy as np import nibabel as nb +import pytest +from scipy import sparse from nitransforms.base import SurfaceMesh from nitransforms.surface import ( SurfaceTransformBase, SurfaceCoordinateTransform, SurfaceResampler ) -import pytest -from scipy import sparse # def test_surface_transform_npz(): # mat = sparse.random(10, 10, density=0.5) @@ -57,13 +57,13 @@ def test_SurfaceTransformBase(testdata_path): assert stfb_ff == stfb # test inversion and setting - stfb_i = stfb.__invert__() + stfb_i = ~stfb stfb.reference = pial stfb.moving = sphere_reg - assert (stfb_i._reference._coords == stfb._reference._coords).all() - assert (stfb_i._reference._triangles == stfb._reference._triangles).all() - assert (stfb_i._moving._coords == stfb._moving._coords).all() - assert (stfb_i._moving._triangles == stfb._moving._triangles).all() + assert np.all(stfb_i._reference._coords == stfb._reference._coords) + 
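The mat_data/mat_indices/mat_indptr/mat_shape datasets stored in the X5 file are the standard pieces of a compressed sparse row matrix, and from_filename() above reassembles the interpolation weights from them; the same round trip in isolation, on a small synthetic matrix:

    from scipy import sparse

    mat = sparse.random(5, 8, density=0.3, format="csr", random_state=0)

    # what gets stored ...
    pieces = dict(data=mat.data, indices=mat.indices, indptr=mat.indptr, shape=mat.shape)

    # ... and how it is put back together
    rebuilt = sparse.csr_matrix(
        (pieces["data"], pieces["indices"], pieces["indptr"]), shape=pieces["shape"]
    )
    assert (rebuilt != mat).nnz == 0  # identical values and sparsity pattern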
assert np.all(stfb_i._reference._triangles == stfb._reference._triangles) + assert np.all(stfb_i._moving._coords == stfb._moving._coords) + assert np.all(stfb_i._moving._triangles == stfb._moving._triangles) # test equality assert stfb_i == stfb @@ -89,21 +89,21 @@ def test_SurfaceCoordinateTransform(testdata_path): assert sct == sctf # test mapping - assert (sct.map(sct.moving._coords[:100]) == sct.reference._coords[:100]).all() - assert (sct.map(sct.reference._coords[:100], inverse=True) == sct.moving._coords[:100]).all() + assert np.all(sct.map(sct.moving._coords[:100]) == sct.reference._coords[:100]) + assert np.all(sct.map(sct.reference._coords[:100], inverse=True) == sct.moving._coords[:100]) with pytest.raises(NotImplementedError): sct.map(sct.reference._coords[0]) # test inversion and addition - scti = sct.__invert__() + scti = ~sct assert scti + sct == SurfaceCoordinateTransform(pial, pial) assert sct + scti == SurfaceCoordinateTransform(sphere_reg, sphere_reg) sct.reference = pial sct.moving = sphere_reg - assert (scti.reference._coords == sct.reference._coords).all() - assert (scti.reference._triangles == sct.reference._triangles).all() + assert np.all(scti.reference._coords == sct.reference._coords) + assert np.all(scti.reference._triangles == sct.reference._triangles) assert scti == sct def test_ProjectUnproject(testdata_path): @@ -121,9 +121,9 @@ def test_ProjectUnproject(testdata_path): transformed = projunproj.apply(fslr_fsaverage_sphere_path) projunproj_ref = nb.load(subj_fsaverage_sphere_path) assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 - assert (transformed._triangles == projunproj_ref.agg_data()[1]).all() + assert np.all(transformed._triangles == projunproj_ref.agg_data()[1]) -def test_SurfaceResampler(testdata_path): +def test_SurfaceResampler(testdata_path, tmpdir): dif_tol = 0.001 fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" @@ -163,23 +163,17 @@ def test_SurfaceResampler(testdata_path): # test file io fn = tempfile.mktemp(suffix=".h5") - try: - resampling.to_filename(fn) - resampling2 = SurfaceResampler.from_filename(fn) - - #assert resampling2 == resampling - assert np.allclose(resampling2.reference._coords, resampling.reference._coords) - assert np.all(resampling2.reference._triangles == resampling.reference._triangles) - assert np.allclose(resampling2.reference._coords, resampling.reference._coords) - assert np.all(resampling2.moving._triangles == resampling.moving._triangles) + resampling.to_filename(fn) + resampling2 = SurfaceResampler.from_filename(fn) - resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') - assert np.all(resampled_thickness2 == resampled_thickness) + #assert resampling2 == resampling + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) + assert np.all(resampling2.reference._triangles == resampling.reference._triangles) + assert np.allclose(resampling2.reference._coords, resampling.reference._coords) + assert np.all(resampling2.moving._triangles == resampling.moving._triangles) - except Exception: - os.remove(fn) - raise - os.remove(fn) + resampled_thickness2 = resampling2.apply(subj_thickness.agg_data(), normalize='element') + assert np.all(resampled_thickness2 == resampled_thickness) # test loading with a csr assert isinstance(resampling.mat, sparse.csr_array) @@ -188,18 +182,14 @@ def 
test_SurfaceResampler(testdata_path): assert np.all(resampled_thickness2a == resampled_thickness) with pytest.raises(ValueError): - rsfail = SurfaceResampler(moving, reference, mat=resampling.mat) + _ = SurfaceResampler(moving, reference, mat=resampling.mat) # test map assert np.all(resampling.map(np.array([[0, 0, 0]])) == np.array([[0, 0, 0]])) # test loading from surfaces - resampling3 = SurfaceResampler.from_filename(reference_file=fslr_sphere_path, - moving_file=sphere_reg_path) + resampling3 = SurfaceResampler.from_filename(reference_path=fslr_sphere_path, + moving_path=sphere_reg_path) assert resampling3 == resampling - assert np.all(resampling3.reference._coords == resampling.reference._coords) - assert np.all(resampling3.reference._triangles == resampling.reference._triangles) - assert np.all(resampling3.reference._coords == resampling.reference._coords) - assert np.all(resampling3.moving._triangles == resampling.moving._triangles) resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness3 == resampled_thickness) From e6a63d9454f67accc70bd2018a50365d0a00de21 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Fri, 21 Jun 2024 15:39:46 +0900 Subject: [PATCH 082/123] RF: reorganize x5 files --- nitransforms/surface.py | 133 ++++++++++++++++++++++++----- nitransforms/tests/test_surface.py | 20 ++++- 2 files changed, 126 insertions(+), 27 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index ce8a8069..b9d97f7a 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -83,18 +83,18 @@ def __init__(self, reference, moving): Parameters ---------- reference: surface - Surface with the destination coordinates for each index. - moving: surface Surface with the starting coordinates for each index. + moving: surface + Surface with the destination coordinates for each index. 
""" - super().__init__(reference=reference, moving=moving) + super().__init__(reference=SurfaceMesh(reference), moving=SurfaceMesh(moving)) if np.all(self._reference._triangles != self._moving._triangles): raise ValueError("Both surfaces for an index transform must have corresponding" " vertices.") def map(self, x, inverse=False): - if inverse: + if not inverse: source = self.reference dest = self.moving else: @@ -113,6 +113,77 @@ def __add__(self, other): return self.__class__(self.reference, other.moving) raise NotImplementedError + def _to_hdf5(self, x5_root): + """Write transform to HDF5 file.""" + triangles = x5_root.create_group("Triangles") + coords = x5_root.create_group("Coordinates") + coords.create_dataset("0", data=self.reference._coords) + coords.create_dataset("1", data=self.moving._coords) + triangles.create_dataset("0", data=self.reference._triangles) + xform = x5_root.create_group("Transform") + xform.attrs["Type"] = "SurfaceCoordinateTransform" + reference = xform.create_group("Reference") + reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0') + reference['Triangles'] = h5py.SoftLink('/0/Triangles/0') + moving = xform.create_group("Moving") + moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1') + moving['Triangles'] = h5py.SoftLink('/0/Triangles/0') + + def to_filename(self, filename, fmt=None): + """Store the transform.""" + if fmt is None: + fmt = "npz" if filename.endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # sparse.save_npz(filename, self.mat) + # return filename + + with h5py.File(filename, "w") as out_file: + out_file.attrs["Format"] = "X5" + out_file.attrs["Version"] = np.uint16(1) + root = out_file.create_group("/0") + self._to_hdf5(root) + + return filename + + @classmethod + def from_filename(cls, filename=None, reference_path=None, moving_path=None, + fmt=None): + """Load transform from file.""" + if filename is None: + if reference_path is None or moving_path is None: + raise ValueError("You must pass either a X5 file or a pair of reference and moving" + " surfaces.") + return cls(SurfaceMesh(nb.load(reference_path)), + SurfaceMesh(nb.load(moving_path))) + + if fmt is None: + try: + fmt = "npz" if filename.endswith(".npz") else "X5" + except AttributeError: + fmt = "npz" if filename.as_posix().endswith(".npz") else "X5" + + if fmt == "npz": + raise NotImplementedError + # return cls(sparse.load_npz(filename)) + + if fmt != "X5": + raise ValueError("Only npz and X5 formats are supported.") + + with h5py.File(filename, "r") as f: + assert f.attrs["Format"] == "X5" + xform = f["/0/Transform"] + reference = SurfaceMesh.from_arrays( + xform['Reference']['Coordinates'], + xform['Reference']['Triangles'] + ) + + moving = SurfaceMesh.from_arrays( + xform['Moving']['Coordinates'], + xform['Moving']['Triangles'] + ) + return cls(reference, moving) class SurfaceResampler(SurfaceTransformBase): """Represents transformations in which the coordinate space remains the same and the indicies @@ -293,17 +364,26 @@ def apply(self, x, inverse=False, normalize="element"): def _to_hdf5(self, x5_root): """Write transform to HDF5 file.""" + triangles = x5_root.create_group("Triangles") + coords = x5_root.create_group("Coordinates") + coords.create_dataset("0", data=self.reference._coords) + coords.create_dataset("1", data=self.moving._coords) + triangles.create_dataset("0", data=self.reference._triangles) + triangles.create_dataset("1", data=self.moving._triangles) xform = x5_root.create_group("Transform") xform.attrs["Type"] = 
"SurfaceResampling" - xform.attrs['interpolation_method'] = self.interpolation_method - xform.create_dataset("mat_data", data=self.mat.data) - xform.create_dataset("mat_indices", data=self.mat.indices) - xform.create_dataset("mat_indptr", data=self.mat.indptr) - xform.create_dataset("mat_shape", data=self.mat.shape) - xform.create_dataset("reference_coordinates", data=self.reference._coords) - xform.create_dataset("reference_triangles", data=self.reference._triangles) - xform.create_dataset("moving_coordinates", data=self.moving._coords) - xform.create_dataset("moving_triangles", data=self.moving._triangles) + xform.attrs['InterpolationMethod'] = self.interpolation_method + mat = xform.create_group("IndexWeights") + mat.create_dataset("Data", data=self.mat.data) + mat.create_dataset("Indices", data=self.mat.indices) + mat.create_dataset("Indptr", data=self.mat.indptr) + mat.create_dataset("Shape", data=self.mat.shape) + reference = xform.create_group("Reference") + reference['Coordinates'] = h5py.SoftLink('/0/Coordinates/0') + reference['Triangles'] = h5py.SoftLink('/0/Triangles/0') + moving = xform.create_group("Moving") + moving['Coordinates'] = h5py.SoftLink('/0/Coordinates/1') + moving['Triangles'] = h5py.SoftLink('/0/Triangles/1') def to_filename(self, filename, fmt=None): """Store the transform.""" @@ -338,7 +418,10 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, interpolation_method=interpolation_method) if fmt is None: - fmt = "npz" if filename.endswith(".npz") else "X5" + try: + fmt = "npz" if filename.endswith(".npz") else "X5" + except AttributeError: + fmt = "npz" if filename.as_posix().endswith(".npz") else "X5" if fmt == "npz": raise NotImplementedError @@ -350,20 +433,24 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, with h5py.File(filename, "r") as f: assert f.attrs["Format"] == "X5" xform = f["/0/Transform"] - mat = sparse.csr_matrix( - (xform["mat_data"][()], xform["mat_indices"][()], xform["mat_indptr"][()]), - shape=xform["mat_shape"][()], - ) + try: + iws = xform['IndexWeights'] + mat = sparse.csr_matrix( + (iws["Data"][()], iws["Indices"][()], iws["Indptr"][()]), + shape=iws["Shape"][()], + ) + except KeyError: + mat=None reference = SurfaceMesh.from_arrays( - xform['reference_coordinates'], - xform['reference_triangles'] + xform['Reference']['Coordinates'], + xform['Reference']['Triangles'] ) moving = SurfaceMesh.from_arrays( - xform['moving_coordinates'], - xform['moving_triangles'] + xform['Moving']['Coordinates'], + xform['Moving']['Triangles'] ) - interpolation_method = xform.attrs['interpolation_method'] + interpolation_method = xform.attrs['InterpolationMethod'] return cls(reference, moving, interpolation_method=interpolation_method, mat=mat) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index a6a17a62..de046edf 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -85,14 +85,15 @@ def test_SurfaceCoordinateTransform(testdata_path): # test loading from filenames sct = SurfaceCoordinateTransform(sphere_reg, pial) - sctf = SurfaceCoordinateTransform.from_filename(sphere_reg_path, pial_path) + sctf = SurfaceCoordinateTransform.from_filename(reference_path=sphere_reg_path, + moving_path=pial_path) assert sct == sctf # test mapping - assert np.all(sct.map(sct.moving._coords[:100]) == sct.reference._coords[:100]) - assert np.all(sct.map(sct.reference._coords[:100], inverse=True) == sct.moving._coords[:100]) + assert 
np.all(sct.map(sct.moving._coords[:100], inverse=True) == sct.reference._coords[:100]) + assert np.all(sct.map(sct.reference._coords[:100]) == sct.moving._coords[:100]) with pytest.raises(NotImplementedError): - sct.map(sct.reference._coords[0]) + sct.map(sct.moving._coords[0]) # test inversion and addition scti = ~sct @@ -106,6 +107,17 @@ def test_SurfaceCoordinateTransform(testdata_path): assert np.all(scti.reference._triangles == sct.reference._triangles) assert scti == sct +def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): + sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + + sct = SurfaceCoordinateTransform(sphere_reg_path, pial_path) + fn = tempfile.mktemp(suffix=".h5") + sct.to_filename(fn) + sct2 = SurfaceCoordinateTransform.from_filename(fn) + assert sct == sct2 + def test_ProjectUnproject(testdata_path): sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" From 4e2abf0f86e4f8b25202f0931e7b811a0c03725b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 22 Jun 2024 10:31:20 +0900 Subject: [PATCH 083/123] TEST: Fix tests for PY312, NP2 --- nitransforms/io/afni.py | 2 +- nitransforms/io/base.py | 4 ++-- nitransforms/tests/test_version.py | 7 ++++++- setup.cfg | 1 + 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/nitransforms/io/afni.py b/nitransforms/io/afni.py index 06eaf432..7c66d434 100644 --- a/nitransforms/io/afni.py +++ b/nitransforms/io/afni.py @@ -237,7 +237,7 @@ def _is_oblique(affine, thres=OBLIQUITY_THRESHOLD_DEG): True """ - return (obliquity(affine).max() * 180 / pi) > thres + return float(obliquity(affine).max() * 180 / pi) > thres def _afni_deobliqued_grid(oblique, shape): diff --git a/nitransforms/io/base.py b/nitransforms/io/base.py index d86c8539..3c923426 100644 --- a/nitransforms/io/base.py +++ b/nitransforms/io/base.py @@ -76,12 +76,12 @@ class LinearParameters(LinearTransformStruct): Examples -------- >>> lp = LinearParameters() - >>> np.all(lp.structarr['parameters'] == np.eye(4)) + >>> np.array_equal(lp.structarr['parameters'], np.eye(4)) True >>> p = np.diag([2., 2., 2., 1.]) >>> lp = LinearParameters(p) - >>> np.all(lp.structarr['parameters'] == p) + >>> np.array_equal(lp.structarr['parameters'], p) True """ diff --git a/nitransforms/tests/test_version.py b/nitransforms/tests/test_version.py index a0723e9a..bc4c4a0a 100644 --- a/nitransforms/tests/test_version.py +++ b/nitransforms/tests/test_version.py @@ -1,10 +1,15 @@ """Test _version.py.""" import sys from collections import namedtuple -from pkg_resources import DistributionNotFound from importlib import reload +import pytest import nitransforms +try: + from pkg_resources import DistributionNotFound +except ImportError: + pytest.skip(allow_module_level=True) + def test_version_scm0(monkeypatch): """Retrieve the version via setuptools_scm.""" diff --git a/setup.cfg b/setup.cfg index 20fe531e..79e47f47 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,6 +49,7 @@ test = pytest-cov pytest-env codecov + lxml tests = %(test)s From 06f8a274a8c19ee7c0f058c4fa83ff8ef12584e3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 22 Jun 2024 11:00:45 +0900 Subject: [PATCH 084/123] TEST: Disable test_cli if xdist is enabled --- nitransforms/tests/test_cli.py | 8 ++++++++ 1 file 
changed, 8 insertions(+) diff --git a/nitransforms/tests/test_cli.py b/nitransforms/tests/test_cli.py index 7f16a1de..58867131 100644 --- a/nitransforms/tests/test_cli.py +++ b/nitransforms/tests/test_cli.py @@ -1,10 +1,18 @@ +import os from textwrap import dedent import pytest from ..cli import cli_apply, main as ntcli +if os.getenv("PYTEST_XDIST_WORKER"): + breaks_on_xdist = pytest.mark.skip(reason="xdist is active; rerun without to run this test.") +else: + def breaks_on_xdist(test): + return test + +@breaks_on_xdist def test_cli(capsys): # empty command with pytest.raises(SystemExit): From 4d2f4c7b7c0a5ace5ca5dae9ca774bda16b83379 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 22 Jun 2024 11:01:03 +0900 Subject: [PATCH 085/123] CI: Add Python 3.12 tests, unpin the flake8 Python version --- .github/workflows/pythonpackage.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 97c4984e..71a1494b 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Set up Python ${{ matrix.python-version }} @@ -91,8 +91,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python 3.7 + - name: Set up Python 3 uses: actions/setup-python@v4 - with: - python-version: 3.7 - run: pipx run flake8 nitransforms From 7799b59018d9db338463e9407c2d6c98a4d22060 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 22 Jun 2024 12:19:56 +0900 Subject: [PATCH 086/123] Update .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8681c41b..1494e83a 100644 --- a/.gitignore +++ b/.gitignore @@ -82,3 +82,4 @@ local_settings.py *.swp .vscode/ +.DS_Store From f9cabece977dd62363791481a157e86d7292125e Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 12:29:52 +0900 Subject: [PATCH 087/123] Update nitransforms/surface.py Co-authored-by: Oscar Esteban --- nitransforms/surface.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index b9d97f7a..0afca86b 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -186,13 +186,13 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, return cls(reference, moving) class SurfaceResampler(SurfaceTransformBase): - """Represents transformations in which the coordinate space remains the same and the indicies - change. - To achieve surface project-unproject functionality: + """ + Represents transformations in which the coordinate space remains the same and the indices change. 
+ To achieve surface project-unproject functionality: sphere_in as the reference sphere_project_to as the moving - Then apply the transformation to sphere_unproject_from - """ + Then apply the transformation to sphere_unproject_from + """ __slots__ = ("_reference", "_moving", "mat", 'interpolation_method') From a4d6df2792447450e902a18a4fb759041940bab0 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 12:31:23 +0900 Subject: [PATCH 088/123] Update nitransforms/tests/test_surface.py Co-authored-by: Oscar Esteban --- nitransforms/tests/test_surface.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index de046edf..25bd3ee5 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -84,9 +84,9 @@ def test_SurfaceCoordinateTransform(testdata_path): sct = SurfaceCoordinateTransform(fslr_sphere, pial) # test loading from filenames - sct = SurfaceCoordinateTransform(sphere_reg, pial) - sctf = SurfaceCoordinateTransform.from_filename(reference_path=sphere_reg_path, - moving_path=pial_path) + sct = SurfaceCoordinateTransform(pial, sphere_reg) + sctf = SurfaceCoordinateTransform.from_filename(reference_path=pial_path, + moving_path=sphere_reg_path) assert sct == sctf # test mapping From 79b7b50026036efb87a476375eb1119113aaccfc Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 12:31:56 +0900 Subject: [PATCH 089/123] Update nitransforms/tests/test_surface.py Co-authored-by: Oscar Esteban --- nitransforms/tests/test_surface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 25bd3ee5..6f856e28 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -112,7 +112,7 @@ def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" - sct = SurfaceCoordinateTransform(sphere_reg_path, pial_path) + sct = SurfaceCoordinateTransform(pial_path, sphere_reg_path) fn = tempfile.mktemp(suffix=".h5") sct.to_filename(fn) sct2 = SurfaceCoordinateTransform.from_filename(fn) From 76832f5b6aee12648a8b874ffac3e0bd95aae498 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 22 Jun 2024 12:33:02 +0900 Subject: [PATCH 090/123] FIX: Re-enable AFNI tests --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 28fe8c31..a1c5f4b2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -57,7 +57,9 @@ RUN mkdir -p /opt/afni-latest \ -name "3dTshift" -or \ -name "3dUnifize" -or \ -name "3dAutomask" -or \ - -name "3dvolreg" \) -delete + -name "3dvolreg" -or \ + -name "3dNwarpApply" \ + \) -delete # Micromamba FROM downloader as micromamba From 57222fddf29c1bc8db65d527e95326a6ad3078db Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 12:52:21 +0900 Subject: [PATCH 091/123] fix_rebase --- nitransforms/surface.py | 43 +++++++++++++++++++++++++++++- nitransforms/tests/test_base.py | 1 + nitransforms/tests/test_surface.py | 6 +++++ 3 files changed, 49 insertions(+), 1 deletion(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 0afca86b..226c335e 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -18,6 +18,9 @@ from nitransforms.base import ( SurfaceMesh ) +import nibabel as nb +from 
scipy.spatial import KDTree +from scipy.spatial.distance import cdist class SurfaceTransformBase(): @@ -113,6 +116,7 @@ def __add__(self, other): return self.__class__(self.reference, other.moving) raise NotImplementedError + def _to_hdf5(self, x5_root): """Write transform to HDF5 file.""" triangles = x5_root.create_group("Triangles") @@ -211,6 +215,7 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No interpolation_method : str Only barycentric is currently implemented """ + super().__init__(SurfaceMesh(reference), SurfaceMesh(moving), spherical=True) self.reference.set_radius() @@ -226,6 +231,39 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # transform if mat is None: self.__calculate_mat() + r_tree = KDTree(self.reference._coords) + m_tree = KDTree(self.moving._coords) + kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) + + # invert the triangles to generate a lookup table from vertices to triangle index + tri_lut = dict() + for i, idxs in enumerate(self.moving._triangles): + for x in idxs: + if not x in tri_lut: + tri_lut[x] = [i] + else: + tri_lut[x].append(i) + + # calculate the barycentric interpolation weights + bc_weights = [] + enclosing = [] + for sidx, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + close_tris = _find_close_tris(kmrv, tri_lut, self.moving) + ww, ee = _find_weights(point, close_tris, m_tree) + bc_weights.append(ww) + enclosing.append(ee) + + # build sparse matrix + # commenting out code for barycentric nearest neighbor + #bary_nearest = [] + mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) + for s_ix, dd in enumerate(bc_weights): + for k, v in dd.items(): + mat[s_ix, k] = v + # bary_nearest.append(np.array(list(dd.keys()))[np.array(list(dd.values())).argmax()]) + # bary_nearest = np.array(bary_nearest) + # transpose so that number of out vertices is columns + self.mat = sparse.csr_array(mat.T) else: if isinstance(mat, sparse.csr_array): self.mat = mat @@ -283,7 +321,6 @@ def map(self, x): return x def __add__(self, other): - if (isinstance(other, SurfaceResampler) and (other.interpolation_method == self.interpolation_method)): return self.__class__( @@ -455,6 +492,7 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, def _points_to_triangles(points, triangles): + """Implementation that vectorizes project of a point to a set of triangles. 
from: https://stackoverflow.com/a/32529589 """ @@ -495,6 +533,7 @@ def _points_to_triangles(points, triangles): m2 = v < 0 m3 = d < 0 m4 = a + d > b + e + m5 = ce > bd t0 = m0 & m1 & m2 & m3 @@ -588,6 +627,7 @@ def _find_close_tris(kdsv, tri_lut, surface): def _find_weights(point, close_tris, d_tree): point = point[np.newaxis, :] tri_dists = cdist(point, _points_to_triangles(point, close_tris).squeeze()) + closest_tri = close_tris[(tri_dists == tri_dists.min()).squeeze()] # make sure a single closest triangle was found if closest_tri.shape[0] != 1: @@ -599,6 +639,7 @@ def _find_weights(point, close_tris, d_tree): # Make sure point is actually inside triangle enclosing = True if np.all((point > closest_tri).sum(0) != 3): + enclosing = False _, ct_idxs = d_tree.query(closest_tri) a = closest_tri[0] diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index c81b379a..a1c3c419 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -4,6 +4,7 @@ import pytest import h5py + from ..base import ( SpatialReference, SampledSpatialData, diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 6f856e28..3ca1f648 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -12,6 +12,10 @@ SurfaceResampler ) +from nitransforms.base import SurfaceMesh +from nitransforms.surface import SurfaceCoordinateTransform, SurfaceResampler + + # def test_surface_transform_npz(): # mat = sparse.random(10, 10, density=0.5) # xfm = SurfaceCoordinateTransform(mat) @@ -42,6 +46,7 @@ # y_none = xfm.apply(x, normalize="none") # assert y_none.sum() != y_element.sum() # assert y_none.sum() != y_sum.sum() + def test_SurfaceTransformBase(testdata_path): # note these transformations are a bit of a weird use of surface transformation, but I'm # just testing the base class and the io @@ -205,3 +210,4 @@ def test_SurfaceResampler(testdata_path, tmpdir): assert resampling3 == resampling resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness3 == resampled_thickness) + From 2e7c7eb14ca7b88d08766fb71d1c6ecdd872f8a7 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 12:16:38 +0900 Subject: [PATCH 092/123] TEST: drop unused path --- nitransforms/tests/test_surface.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 3ca1f648..d3924a3e 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -115,7 +115,6 @@ def test_SurfaceCoordinateTransform(testdata_path): def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" - fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" sct = SurfaceCoordinateTransform(pial_path, sphere_reg_path) fn = tempfile.mktemp(suffix=".h5") From 8e7c5679f92cc3866a6d7de26b7819efab021032 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 22 Jun 2024 13:21:00 +0900 Subject: [PATCH 093/123] FIX: Load ITK fields from H5 correctly --- nitransforms/io/itk.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/nitransforms/io/itk.py b/nitransforms/io/itk.py index ddeb78e6..b7f59ec7 100644 --- a/nitransforms/io/itk.py +++ 
b/nitransforms/io/itk.py @@ -403,14 +403,19 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False): if xfm["TransformType"][0].startswith(b"DisplacementFieldTransform"): if only_linear: continue - _fixed = np.asanyarray(xfm[f"{typo_fallback}FixedParameters"]) - shape = _fixed[:3].astype("uint16").tolist() - offset = _fixed[3:6].astype("float") - zooms = _fixed[6:9].astype("float") - directions = _fixed[9:].astype("float").reshape((3, 3)) + _fixed = xfm[f"{typo_fallback}FixedParameters"] + shape = _fixed[:3] + offset = _fixed[3:6] + zooms = _fixed[6:9] + directions = np.reshape(_fixed[9:], (3, 3)) affine = from_matvec(directions * zooms, offset) - field = np.asanyarray(xfm[f"{typo_fallback}Parameters"]).reshape( - (*shape, 1, -1) + # ITK uses Fortran ordering, like NIfTI, but with the vector dimension first + field = np.moveaxis( + np.reshape( + xfm[f"{typo_fallback}Parameters"], (3, *shape.astype(int)), order='F' + ), + 0, + -1, ) field[..., (0, 1)] *= -1.0 hdr = Nifti1Header() @@ -418,7 +423,7 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False): hdr.set_data_dtype("float") xfm_list.append( - Nifti1Image(field.astype("float"), LPS @ affine @ LPS, hdr) + Nifti1Image(field.astype("float"), affine @ LPS, hdr) ) continue From 454e2892836b05483f59d774f056bbf1a01f77ea Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 13:23:47 +0900 Subject: [PATCH 094/123] TEST: Fix surface coordinate transfrom tests --- nitransforms/tests/test_surface.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index d3924a3e..e3473f05 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -103,11 +103,11 @@ def test_SurfaceCoordinateTransform(testdata_path): # test inversion and addition scti = ~sct - assert scti + sct == SurfaceCoordinateTransform(pial, pial) - assert sct + scti == SurfaceCoordinateTransform(sphere_reg, sphere_reg) + assert sct + scti == SurfaceCoordinateTransform(pial, pial) + assert scti + sct == SurfaceCoordinateTransform(sphere_reg, sphere_reg) - sct.reference = pial - sct.moving = sphere_reg + sct.reference = sphere_reg + sct.moving = pial assert np.all(scti.reference._coords == sct.reference._coords) assert np.all(scti.reference._triangles == sct.reference._triangles) assert scti == sct From bf80e71cc011a2dfc2a6c31a9e1abaff8319ad20 Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 13:48:15 +0900 Subject: [PATCH 095/123] PL: fix style regressions --- nitransforms/surface.py | 10 +++------- nitransforms/tests/test_surface.py | 6 ------ 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 226c335e..555e2e15 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -18,9 +18,6 @@ from nitransforms.base import ( SurfaceMesh ) -import nibabel as nb -from scipy.spatial import KDTree -from scipy.spatial.distance import cdist class SurfaceTransformBase(): @@ -231,12 +228,11 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # transform if mat is None: self.__calculate_mat() - r_tree = KDTree(self.reference._coords) m_tree = KDTree(self.moving._coords) - kmr_dists, kmr_closest = m_tree.query(self.reference._coords, k=10) + _, kmr_closest = m_tree.query(self.reference._coords, k=10) # invert the triangles to generate a lookup table from vertices to triangle index - tri_lut = dict() + tri_lut = {} for i, idxs in 
enumerate(self.moving._triangles): for x in idxs: if not x in tri_lut: @@ -247,7 +243,7 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # calculate the barycentric interpolation weights bc_weights = [] enclosing = [] - for sidx, (point, kmrv) in enumerate(zip(self.reference._coords, kmr_closest)): + for point, kmrv in zip(self.reference._coords, kmr_closest): close_tris = _find_close_tris(kmrv, tri_lut, self.moving) ww, ee = _find_weights(point, close_tris, m_tree) bc_weights.append(ww) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index e3473f05..1f3173e1 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -1,4 +1,3 @@ -import os import tempfile import numpy as np @@ -12,10 +11,6 @@ SurfaceResampler ) -from nitransforms.base import SurfaceMesh -from nitransforms.surface import SurfaceCoordinateTransform, SurfaceResampler - - # def test_surface_transform_npz(): # mat = sparse.random(10, 10, density=0.5) # xfm = SurfaceCoordinateTransform(mat) @@ -209,4 +204,3 @@ def test_SurfaceResampler(testdata_path, tmpdir): assert resampling3 == resampling resampled_thickness3 = resampling3.apply(subj_thickness.agg_data(), normalize='element') assert np.all(resampled_thickness3 == resampled_thickness) - From f34d60997d54adbd28ab220f446ce3511a37b7fb Mon Sep 17 00:00:00 2001 From: Dylan Nielson Date: Sat, 22 Jun 2024 14:20:10 +0900 Subject: [PATCH 096/123] PL: fix long lines --- nitransforms/surface.py | 11 +++--- nitransforms/tests/test_base.py | 2 +- nitransforms/tests/test_surface.py | 59 ++++++++++++++++++++++++------ 3 files changed, 54 insertions(+), 18 deletions(-) diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 555e2e15..58fcf5c7 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -113,7 +113,6 @@ def __add__(self, other): return self.__class__(self.reference, other.moving) raise NotImplementedError - def _to_hdf5(self, x5_root): """Write transform to HDF5 file.""" triangles = x5_root.create_group("Triangles") @@ -186,9 +185,11 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, ) return cls(reference, moving) + class SurfaceResampler(SurfaceTransformBase): """ - Represents transformations in which the coordinate space remains the same and the indices change. + Represents transformations in which the coordinate space remains the same + and the indices change. 
To achieve surface project-unproject functionality: sphere_in as the reference sphere_project_to as the moving @@ -235,7 +236,7 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No tri_lut = {} for i, idxs in enumerate(self.moving._triangles): for x in idxs: - if not x in tri_lut: + if x not in tri_lut: tri_lut[x] = [i] else: tri_lut[x].append(i) @@ -251,7 +252,7 @@ def __init__(self, reference, moving, interpolation_method='barycentric', mat=No # build sparse matrix # commenting out code for barycentric nearest neighbor - #bary_nearest = [] + # bary_nearest = [] mat = sparse.lil_array((self.reference._npoints, self.moving._npoints)) for s_ix, dd in enumerate(bc_weights): for k, v in dd.items(): @@ -473,7 +474,7 @@ def from_filename(cls, filename=None, reference_path=None, moving_path=None, shape=iws["Shape"][()], ) except KeyError: - mat=None + mat = None reference = SurfaceMesh.from_arrays( xform['Reference']['Coordinates'], xform['Reference']['Triangles'] diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index a1c3c419..fb4be8d8 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -188,4 +188,4 @@ def test_SurfaceMesh(testdata_path): SurfaceMesh(nb.load(img_path)) with pytest.raises(TypeError): - SurfaceMesh(nb.load(shape_path)) \ No newline at end of file + SurfaceMesh(nb.load(shape_path)) diff --git a/nitransforms/tests/test_surface.py b/nitransforms/tests/test_surface.py index 1f3173e1..a210583e 100644 --- a/nitransforms/tests/test_surface.py +++ b/nitransforms/tests/test_surface.py @@ -45,7 +45,10 @@ def test_SurfaceTransformBase(testdata_path): # note these transformations are a bit of a weird use of surface transformation, but I'm # just testing the base class and the io - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) @@ -71,7 +74,10 @@ def test_SurfaceTransformBase(testdata_path): def test_SurfaceCoordinateTransform(testdata_path): # note these transformations are a bit of a weird use of surface transformation, but I'm # just testing the class and the io - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" @@ -107,8 +113,12 @@ def test_SurfaceCoordinateTransform(testdata_path): assert np.all(scti.reference._triangles == sct.reference._triangles) assert scti == sct + def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" sct = SurfaceCoordinateTransform(pial_path, sphere_reg_path) @@ -117,12 +127,22 @@ def test_SurfaceCoordinateTransformIO(testdata_path, tmpdir): 
sct2 = SurfaceCoordinateTransform.from_filename(fn) assert sct == sct2 + def test_ProjectUnproject(testdata_path): - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" - subj_fsaverage_sphere_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" - fslr_fsaverage_sphere_path = testdata_path / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" + subj_fsaverage_sphere_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii" + ) + fslr_fsaverage_sphere_path = ( + testdata_path + / "tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii" + ) pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" # test project-unproject funcitonality @@ -134,13 +154,28 @@ def test_ProjectUnproject(testdata_path): assert (projunproj_ref.agg_data()[0] - transformed._coords).max() < 0.0005 assert np.all(transformed._triangles == projunproj_ref.agg_data()[1]) + def test_SurfaceResampler(testdata_path, tmpdir): dif_tol = 0.001 - fslr_sphere_path = testdata_path / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" - shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" - ref_resampled_thickness_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" - pial_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" - sphere_reg_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + fslr_sphere_path = ( + testdata_path + / "tpl-fsLR_hemi-R_den-32k_sphere.surf.gii" + ) + shape_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + ) + ref_resampled_thickness_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_thickness.shape.gii" + ) + pial_path = ( + testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_pial.surf.gii" + ) + sphere_reg_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_space-fsLR_desc-reg_sphere.surf.gii" + ) fslr_sphere = SurfaceMesh(nb.load(fslr_sphere_path)) sphere_reg = SurfaceMesh(nb.load(sphere_reg_path)) @@ -177,7 +212,7 @@ def test_SurfaceResampler(testdata_path, tmpdir): resampling.to_filename(fn) resampling2 = SurfaceResampler.from_filename(fn) - #assert resampling2 == resampling + # assert resampling2 == resampling assert np.allclose(resampling2.reference._coords, resampling.reference._coords) assert np.all(resampling2.reference._triangles == resampling.reference._triangles) assert np.allclose(resampling2.reference._coords, resampling.reference._coords) From f470a596a18c3aec5a5045c43d576e7358048000 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 22 Jun 2024 14:45:01 +0900 Subject: [PATCH 097/123] Apply suggestions from code review --- nitransforms/base.py | 3 ++- nitransforms/surface.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 3b52443c..9c8310ab 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -129,7 +129,8 @@ def __init__(self, dataset): def check_sphere(self, tolerance=1.001): """Check sphericity of surface. 
- Based on https://github.com/Washington-University/workbench/blob/7ba3345d161d567a4b628ceb02ab4471fc96cb20/src/Files/SurfaceResamplingHelper.cxx#L503 + Based on https://github.com/Washington-University/workbench/blob/\ +7ba3345d161d567a4b628ceb02ab4471fc96cb20/src/Files/SurfaceResamplingHelper.cxx#L503 """ dists = np.linalg.norm(self._coords, axis=1) return (dists.min() * tolerance) > dists.max() diff --git a/nitransforms/surface.py b/nitransforms/surface.py index 58fcf5c7..7e1e7116 100644 --- a/nitransforms/surface.py +++ b/nitransforms/surface.py @@ -589,7 +589,8 @@ def _barycentric_weights(vecs, coords): triangle, respectively. ``t`` is the scale that needs to be multiplied to ``coords`` to make it in the same plane as the three vertices. - From: https://github.com/neuroboros/neuroboros/blob/f2a2efb914e783add2bf06e0f3715236d3d8550e/src/neuroboros/surface/_barycentric.pyx#L9-L47 + From: https://github.com/neuroboros/neuroboros/blob/\ +f2a2efb914e783add2bf06e0f3715236d3d8550e/src/neuroboros/surface/_barycentric.pyx#L9-L47 """ det = coords[0] * vecs[3, 0] + coords[1] * vecs[3, 1] + coords[2] * vecs[3, 2] if det == 0: From f69faaf44a9cab39141df6f546394e862ed408d2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 22 Jun 2024 14:51:22 +0900 Subject: [PATCH 098/123] doc: add new module to APIdoc --- docs/_api/surface.rst | 6 ++++++ docs/api.rst | 1 + 2 files changed, 7 insertions(+) create mode 100644 docs/_api/surface.rst diff --git a/docs/_api/surface.rst b/docs/_api/surface.rst new file mode 100644 index 00000000..92ba28ab --- /dev/null +++ b/docs/_api/surface.rst @@ -0,0 +1,6 @@ +================== +Surface Transforms +================== + +.. automodule:: nitransforms.surface + :members: diff --git a/docs/api.rst b/docs/api.rst index eb3c566b..a57d6836 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -10,5 +10,6 @@ Information on specific functions, classes, and methods for developers. 
_api/linear _api/manip _api/nonlinear + _api/surface _api/interp _api/patched From 1c7466934dadc36ed6ce2783de7670142074d81d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 22 Jun 2024 19:11:47 -0400 Subject: [PATCH 099/123] Update nitransforms/io/itk.py --- nitransforms/io/itk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/io/itk.py b/nitransforms/io/itk.py index b7f59ec7..afabfd98 100644 --- a/nitransforms/io/itk.py +++ b/nitransforms/io/itk.py @@ -423,7 +423,7 @@ def from_h5obj(cls, fileobj, check=True, only_linear=False): hdr.set_data_dtype("float") xfm_list.append( - Nifti1Image(field.astype("float"), affine @ LPS, hdr) + Nifti1Image(field.astype("float"), LPS @ affine, hdr) ) continue From a366f850066ae9b8d1bd23cc4b32ef6dd74892d2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 11 Jul 2024 14:49:05 +0200 Subject: [PATCH 100/123] MAINT: Pin numpy at version 1.x --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index fa9859b6..158a9013 100644 --- a/setup.cfg +++ b/setup.cfg @@ -23,7 +23,7 @@ url = https://github.com/nipy/nitransforms [options] python_requires = >= 3.8 install_requires = - numpy >= 1.21.0 + numpy ~= 1.21 scipy >= 1.6.0 nibabel >= 3.0 h5py From b922fa5fe473d43d03f56afe2aff75fbe52a4f55 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 23 Jul 2024 12:56:36 +0200 Subject: [PATCH 101/123] wip: initiate implementation --- nitransforms/resampling.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 9de0d2d6..bc343231 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" +from warnings import warn from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload @@ -19,6 +20,9 @@ _as_homogeneous, ) +SERIALIZE_VOLUME_WINDOW_WIDTH : int = 8 +"""Minimum number of volumes to automatically serialize 4D transforms.""" + def apply( transform, @@ -29,6 +33,8 @@ def apply( cval=0.0, prefilter=True, output_dtype=None, + serialize_nvols=SERIALIZE_VOLUME_WINDOW_WIDTH, + njobs=None, ): """ Apply a transformation to an image, resampling on the reference spatial object. @@ -89,14 +95,20 @@ def apply( spatialimage = _nbload(str(spatialimage)) data = np.asanyarray(spatialimage.dataobj) + data_nvols = 1 if data.ndim < 4 else data.shape[-1] + xfm_nvols = len(transforms) - if data.ndim == 4 and data.shape[-1] != len(transform): + if data_nvols == 1 and xfm_nvols > 1: + data = data[..., np.newaxis] + elif data_nvols != xfm_nvols: raise ValueError( "The fourth dimension of the data does not match the tranform's shape." 
) - if data.ndim < transform.ndim: - data = data[..., np.newaxis] + serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf + serialize_4d = max(data_nvols, xfm_nvols) > serialize_nvols + if serialize_4d: + warn("4D transforms serialization into 3D+t not implemented") # For model-based nonlinear transforms, generate the corresponding dense field if hasattr(transform, "to_field") and callable(transform.to_field): From ba6b416c14be8c37ba8d0c37436eda5171bba39b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 24 Jul 2024 00:45:03 +0200 Subject: [PATCH 102/123] hotfix: wrong warning argument name ``level`` --- nitransforms/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 9c8310ab..81ed1a5e 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -338,7 +338,7 @@ def apply(self, *args, **kwargs): message = ( "The `apply` method is deprecated. Please use `nitransforms.resampling.apply` instead." ) - warnings.warn(message, DeprecationWarning, level=2) + warnings.warn(message, DeprecationWarning, stacklevel=2) from .resampling import apply return apply(self, *args, **kwargs) From 6064b8c056c2797b1d6dad3ab4a4365054291982 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Wed, 24 Jul 2024 11:19:56 +0200 Subject: [PATCH 103/123] enh: draft implementation of serialize 4d --- nitransforms/resampling.py | 87 ++++++++++++++++++++++++++------------ 1 file changed, 60 insertions(+), 27 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index bc343231..ad37c768 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -7,12 +7,13 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" -from warnings import warn from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload +from nibabel.arrayproxy import get_obj_dtype from scipy import ndimage as ndi +from nitransforms.linear import Affine, get from nitransforms.base import ( ImageGrid, TransformError, @@ -96,45 +97,77 @@ def apply( data = np.asanyarray(spatialimage.dataobj) data_nvols = 1 if data.ndim < 4 else data.shape[-1] - xfm_nvols = len(transforms) + xfm_nvols = len(transform) + assert xfm_nvols == transform.ndim == _ref.ndim if data_nvols == 1 and xfm_nvols > 1: data = data[..., np.newaxis] elif data_nvols != xfm_nvols: raise ValueError( - "The fourth dimension of the data does not match the tranform's shape." + "The fourth dimension of the data does not match the transform's shape." 
) serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf serialize_4d = max(data_nvols, xfm_nvols) > serialize_nvols if serialize_4d: - warn("4D transforms serialization into 3D+t not implemented") - - # For model-based nonlinear transforms, generate the corresponding dense field - if hasattr(transform, "to_field") and callable(transform.to_field): - targets = ImageGrid(spatialimage).index( - _as_homogeneous( - transform.to_field(reference=reference).map(_ref.ndcoords.T), - dim=_ref.ndim, + for t, xfm_t in enumerate(transform): + ras2vox = ~Affine(spatialimage.affine) + input_dtype = get_obj_dtype(spatialimage.dataobj) + output_dtype = output_dtype or input_dtype + + # Map the input coordinates on to timepoint t of the target (moving) + xcoords = _ref.ndcoords.astype("f4").T + ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] + + # Calculate corresponding voxel coordinates + yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] + + # Interpolate + dataobj = ( + np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + if spatialimage.ndim in (2, 3) + else None ) - ) + resampled[..., t] = ndi.map_coordinates( + ( + dataobj + if dataobj is not None + else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) + ), + yvoxels.T, + output=output_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + else: - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) - ) + # For model-based nonlinear transforms, generate the corresponding dense field + if hasattr(transform, "to_field") and callable(transform.to_field): + targets = ImageGrid(spatialimage).index( + _as_homogeneous( + transform.to_field(reference=reference).map(_ref.ndcoords.T), + dim=_ref.ndim, + ) + ) + else: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + ) - if transform.ndim == 4: - targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T - - resampled = ndi.map_coordinates( - data, - targets, - output=output_dtype, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) + if transform.ndim == 4: + targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T + + resampled = ndi.map_coordinates( + data, + targets, + output=output_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) if isinstance(_ref, ImageGrid): # If reference is grid, reshape hdr = None From e47a4769b03c351a8e907e380e3dffd74e3a2955 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Thu, 25 Jul 2024 09:34:44 +0200 Subject: [PATCH 104/123] fix: passes more tests, more suggestions in progress --- nitransforms/resampling.py | 18 +++++++++++++++--- nitransforms/tests/test_base.py | 3 ++- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index ad37c768..b9ca65b8 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -13,7 +13,7 @@ from nibabel.arrayproxy import get_obj_dtype from scipy import ndimage as ndi -from nitransforms.linear import Affine, get +from nitransforms.linear import Affine, LinearTransformsMapping from nitransforms.base import ( ImageGrid, TransformError, @@ -97,15 +97,27 @@ def apply( data = np.asanyarray(spatialimage.dataobj) data_nvols = 1 if data.ndim < 4 else data.shape[-1] - xfm_nvols = len(transform) - assert xfm_nvols == transform.ndim == _ref.ndim + if type(transform) == Affine or 
type(transform) == LinearTransformsMapping: + xfm_nvols = len(transform) + else: + xfm_nvols = transform.ndim + """ if data_nvols == 1 and xfm_nvols > 1: data = data[..., np.newaxis] elif data_nvols != xfm_nvols: raise ValueError( "The fourth dimension of the data does not match the transform's shape." ) + RESAMPLING FAILS. SUGGEST: + """ + if data.ndim < transform.ndim: + data = data[..., np.newaxis] + elif data_nvols > 1 and data_nvols != xfm_nvols: + import pdb; pdb.set_trace() + raise ValueError( + "The fourth dimension of the data does not match the transform's shape." + ) serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf serialize_4d = max(data_nvols, xfm_nvols) > serialize_nvols diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index fb4be8d8..74bc3358 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -186,6 +186,7 @@ def test_SurfaceMesh(testdata_path): with pytest.raises(ValueError): SurfaceMesh(nb.load(img_path)) - + """ with pytest.raises(TypeError): SurfaceMesh(nb.load(shape_path)) + """ \ No newline at end of file From 1616a35bf454898a6ff95b4d2925b4496da5be81 Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Thu, 25 Jul 2024 11:37:32 +0200 Subject: [PATCH 105/123] fix: pass tests --- nitransforms/resampling.py | 1 - nitransforms/tests/test_base.py | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index b9ca65b8..c36750ef 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -114,7 +114,6 @@ def apply( if data.ndim < transform.ndim: data = data[..., np.newaxis] elif data_nvols > 1 and data_nvols != xfm_nvols: - import pdb; pdb.set_trace() raise ValueError( "The fourth dimension of the data does not match the transform's shape." ) diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 74bc3358..fb4be8d8 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -186,7 +186,6 @@ def test_SurfaceMesh(testdata_path): with pytest.raises(ValueError): SurfaceMesh(nb.load(img_path)) - """ + with pytest.raises(TypeError): SurfaceMesh(nb.load(shape_path)) - """ \ No newline at end of file From 6292daf1d0f7dc56ae51d1d87a83fe827f72dd5c Mon Sep 17 00:00:00 2001 From: Julien Marabotto Date: Thu, 25 Jul 2024 13:44:11 +0200 Subject: [PATCH 106/123] fix: pass tests, serialization implemented --- nitransforms/resampling.py | 44 +++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index c36750ef..52f831ef 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -102,15 +102,7 @@ def apply( xfm_nvols = len(transform) else: xfm_nvols = transform.ndim - """ - if data_nvols == 1 and xfm_nvols > 1: - data = data[..., np.newaxis] - elif data_nvols != xfm_nvols: - raise ValueError( - "The fourth dimension of the data does not match the transform's shape." - ) - RESAMPLING FAILS. 
SUGGEST: - """ + if data.ndim < transform.ndim: data = data[..., np.newaxis] elif data_nvols > 1 and data_nvols != xfm_nvols: @@ -119,26 +111,38 @@ def apply( ) serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf - serialize_4d = max(data_nvols, xfm_nvols) > serialize_nvols + serialize_4d = max(data_nvols, xfm_nvols) >= serialize_nvols + if serialize_4d: - for t, xfm_t in enumerate(transform): - ras2vox = ~Affine(spatialimage.affine) - input_dtype = get_obj_dtype(spatialimage.dataobj) - output_dtype = output_dtype or input_dtype + # Avoid opening the data array just yet + input_dtype = get_obj_dtype(spatialimage.dataobj) + output_dtype = output_dtype or input_dtype + + # Prepare physical coordinates of input (grid, points) + xcoords = _ref.ndcoords.astype("f4").T + + # Invert target's (moving) affine once + ras2vox = ~Affine(spatialimage.affine) + dataobj = ( + np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + if spatialimage.ndim in (2, 3) + else None + ) + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (xcoords.shape[0], len(transform)), dtype=output_dtype, order="F" + ) + + for t, xfm_t in enumerate(transform): # Map the input coordinates on to timepoint t of the target (moving) - xcoords = _ref.ndcoords.astype("f4").T ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] # Calculate corresponding voxel coordinates yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] # Interpolate - dataobj = ( - np.asanyarray(spatialimage.dataobj, dtype=input_dtype) - if spatialimage.ndim in (2, 3) - else None - ) resampled[..., t] = ndi.map_coordinates( ( dataobj From 86b3d111f7635a04b403f4eb5c39000fd7637e20 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 23 Jul 2024 12:56:36 +0200 Subject: [PATCH 107/123] wip: initiate implementation --- nitransforms/resampling.py | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 9de0d2d6..b54329a5 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" +from warnings import warn from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload @@ -19,6 +20,12 @@ _as_homogeneous, ) +SERIALIZE_VOLUME_WINDOW_WIDTH : int = 8 +"""Minimum number of volumes to automatically serialize 4D transforms.""" + +class NotImplementedWarning(UserWarning): + """A custom class for warnings.""" + def apply( transform, @@ -29,6 +36,8 @@ def apply( cval=0.0, prefilter=True, output_dtype=None, + serialize_nvols=SERIALIZE_VOLUME_WINDOW_WIDTH, + njobs=None, ): """ Apply a transformation to an image, resampling on the reference spatial object. @@ -89,14 +98,24 @@ def apply( spatialimage = _nbload(str(spatialimage)) data = np.asanyarray(spatialimage.dataobj) + data_nvols = 1 if data.ndim < 4 else data.shape[-1] + xfm_nvols = len(transform) - if data.ndim == 4 and data.shape[-1] != len(transform): + if data_nvols == 1 and xfm_nvols > 1: + data = data[..., np.newaxis] + elif data_nvols != xfm_nvols: raise ValueError( "The fourth dimension of the data does not match the tranform's shape." 
) - if data.ndim < transform.ndim: - data = data[..., np.newaxis] + serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf + serialize_4d = max(data_nvols, xfm_nvols) > serialize_nvols + if serialize_4d: + warn( + "4D transforms serialization into 3D+t not implemented", + NotImplementedWarning, + stacklevel=2, + ) # For model-based nonlinear transforms, generate the corresponding dense field if hasattr(transform, "to_field") and callable(transform.to_field): From 79e5cadc6bfec93982dd70e62f3e916c0a28ab78 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 30 Jul 2024 16:42:05 +0200 Subject: [PATCH 108/123] enh: integrating @jmarabotto's code --- nitransforms/resampling.py | 84 ++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 39 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 09801b8d..1d6e7f76 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -7,14 +7,13 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" -from warnings import warn + from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload from nibabel.arrayproxy import get_obj_dtype from scipy import ndimage as ndi -from nitransforms.linear import Affine, LinearTransformsMapping from nitransforms.base import ( ImageGrid, TransformError, @@ -22,7 +21,7 @@ _as_homogeneous, ) -SERIALIZE_VOLUME_WINDOW_WIDTH : int = 8 +SERIALIZE_VOLUME_WINDOW_WIDTH: int = 8 """Minimum number of volumes to automatically serialize 4D transforms.""" @@ -96,58 +95,67 @@ def apply( if isinstance(spatialimage, (str, Path)): spatialimage = _nbload(str(spatialimage)) - data = np.asanyarray(spatialimage.dataobj) - data_nvols = 1 if data.ndim < 4 else data.shape[-1] + # Avoid opening the data array just yet + input_dtype = get_obj_dtype(spatialimage.dataobj) + output_dtype = output_dtype or input_dtype + # Number of transformations + data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1] xfm_nvols = len(transform) - if data_nvols == 1 and xfm_nvols > 1: - data = data[..., np.newaxis] - elif data_nvols != xfm_nvols: + if data_nvols != xfm_nvols and min(data_nvols, xfm_nvols) > 1: raise ValueError( "The fourth dimension of the data does not match the transform's shape." 
) - serialize_nvols = serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf - serialize_4d = max(data_nvols, xfm_nvols) >= serialize_nvols + serialize_nvols = ( + serialize_nvols if serialize_nvols and serialize_nvols > 1 else np.inf + ) + n_resamplings = max(data_nvols, xfm_nvols) + serialize_4d = n_resamplings >= serialize_nvols + + targets = None + if hasattr(transform, "to_field") and callable(transform.to_field): + targets = ImageGrid(spatialimage).index( + _as_homogeneous( + transform.to_field(reference=reference).map(_ref.ndcoords.T), + dim=_ref.ndim, + ) + ) + elif xfm_nvols == 1: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + ) if serialize_4d: - # Avoid opening the data array just yet - input_dtype = get_obj_dtype(spatialimage.dataobj) - output_dtype = output_dtype or input_dtype - - # Prepare physical coordinates of input (grid, points) - xcoords = _ref.ndcoords.astype("f4").T - - # Invert target's (moving) affine once - ras2vox = ~Affine(spatialimage.affine) - dataobj = ( + data = ( np.asanyarray(spatialimage.dataobj, dtype=input_dtype) - if spatialimage.ndim in (2, 3) + if data_nvols == 1 else None ) # Order F ensures individual volumes are contiguous in memory # Also matches NIfTI, making final save more efficient resampled = np.zeros( - (xcoords.shape[0], len(transform)), dtype=output_dtype, order="F" + (spatialimage.size, len(transform)), dtype=output_dtype, order="F" ) - for t, xfm_t in enumerate(transform): - # Map the input coordinates on to timepoint t of the target (moving) - ycoords = xfm_t.map(xcoords)[..., : _ref.ndim] + for t in range(n_resamplings): + xfm_t = transform if n_resamplings == 1 else transform[t] - # Calculate corresponding voxel coordinates - yvoxels = ras2vox.map(ycoords)[..., : _ref.ndim] + if targets is None: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(xfm_t.map(_ref.ndcoords.T), dim=_ref.ndim) + ) # Interpolate resampled[..., t] = ndi.map_coordinates( ( - dataobj - if dataobj is not None + data + if data is not None else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) ), - yvoxels.T, + targets, output=output_dtype, order=order, mode=mode, @@ -156,19 +164,17 @@ def apply( ) else: - # For model-based nonlinear transforms, generate the corresponding dense field - if hasattr(transform, "to_field") and callable(transform.to_field): - targets = ImageGrid(spatialimage).index( - _as_homogeneous( - transform.to_field(reference=reference).map(_ref.ndcoords.T), - dim=_ref.ndim, - ) - ) - else: + data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype) + + if targets is None: targets = ImageGrid(spatialimage).index( # data should be an image _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) ) + # Cast 3D data into 4D if 4D nonsequential transform + if data_nvols == 1 and xfm_nvols > 1: + data = data[..., np.newaxis] + if transform.ndim == 4: targets = _as_homogeneous(targets.reshape(-2, targets.shape[0])).T From e0bde092d14e67491078823e4218aa2afcd35144 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 30 Jul 2024 16:57:55 +0200 Subject: [PATCH 109/123] fix: ensure output dtype when resampling --- nitransforms/resampling.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 1d6e7f76..45474008 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -189,10 +189,9 @@ def apply( 
) if isinstance(_ref, ImageGrid): # If reference is grid, reshape - hdr = None - if _ref.header is not None: - hdr = _ref.header.copy() - hdr.set_data_dtype(output_dtype or spatialimage.get_data_dtype()) + hdr = _ref.header.copy() if _ref.header is not None else spatialimage.header.__class__() + hdr.set_data_dtype(output_dtype) + moved = spatialimage.__class__( resampled.reshape(_ref.shape if data.ndim < 4 else _ref.shape + (-1,)), _ref.affine, From fbb04511dd210df8a08101fe883e9ab140807e8b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 30 Jul 2024 18:59:34 +0200 Subject: [PATCH 110/123] fix: resolve some failing tests --- nitransforms/nonlinear.py | 4 ++++ nitransforms/resampling.py | 21 ++++++++++----------- nitransforms/tests/test_base.py | 7 +++++-- 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index 9c29c53c..ced348a2 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -94,6 +94,10 @@ def __repr__(self): """Beautify the python representation.""" return f"<{self.__class__.__name__}[{self._field.shape[-1]}D] {self._field.shape[:3]}>" + def __len__(self): + """Enable len() -- for compatibility, only len == 1 is supported.""" + return 1 + @property def ndim(self): """Get the dimensions of the transform.""" diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 45474008..eb3f9ad0 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -97,7 +97,6 @@ def apply( # Avoid opening the data array just yet input_dtype = get_obj_dtype(spatialimage.dataobj) - output_dtype = output_dtype or input_dtype # Number of transformations data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1] @@ -115,16 +114,17 @@ def apply( serialize_4d = n_resamplings >= serialize_nvols targets = None + ref_ndcoords = _ref.ndcoords.T if hasattr(transform, "to_field") and callable(transform.to_field): targets = ImageGrid(spatialimage).index( _as_homogeneous( - transform.to_field(reference=reference).map(_ref.ndcoords.T), + transform.to_field(reference=reference).map(ref_ndcoords), dim=_ref.ndim, ) ) elif xfm_nvols == 1: targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim) ) if serialize_4d: @@ -137,7 +137,7 @@ def apply( # Order F ensures individual volumes are contiguous in memory # Also matches NIfTI, making final save more efficient resampled = np.zeros( - (spatialimage.size, len(transform)), dtype=output_dtype, order="F" + (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" ) for t in range(n_resamplings): @@ -145,7 +145,7 @@ def apply( if targets is None: targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(xfm_t.map(_ref.ndcoords.T), dim=_ref.ndim) + _as_homogeneous(xfm_t.map(ref_ndcoords), dim=_ref.ndim) ) # Interpolate @@ -156,7 +156,6 @@ def apply( else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) ), targets, - output=output_dtype, order=order, mode=mode, cval=cval, @@ -168,7 +167,7 @@ def apply( if targets is None: targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(transform.map(_ref.ndcoords.T), dim=_ref.ndim) + _as_homogeneous(transform.map(ref_ndcoords), dim=_ref.ndim) ) # Cast 3D data into 4D if 4D nonsequential transform @@ -181,7 +180,6 @@ def apply( resampled = ndi.map_coordinates( data, targets, - output=output_dtype, 
order=order, mode=mode, cval=cval, @@ -190,13 +188,14 @@ def apply( if isinstance(_ref, ImageGrid): # If reference is grid, reshape hdr = _ref.header.copy() if _ref.header is not None else spatialimage.header.__class__() - hdr.set_data_dtype(output_dtype) + hdr.set_data_dtype(output_dtype or spatialimage.header.get_data_dtype()) moved = spatialimage.__class__( - resampled.reshape(_ref.shape if data.ndim < 4 else _ref.shape + (-1,)), + resampled.reshape(_ref.shape if n_resamplings == 1 else _ref.shape + (-1,)), _ref.affine, hdr, ) return moved - return resampled + output_dtype = output_dtype or input_dtype + return resampled.astype(output_dtype) diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index fb4be8d8..c85ac2e2 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -1,6 +1,8 @@ """Tests of the base module.""" import numpy as np import nibabel as nb +from nibabel.arrayproxy import get_obj_dtype + import pytest import h5py @@ -97,7 +99,7 @@ def _to_hdf5(klass, x5_root): fname = testdata_path / "someones_anatomy.nii.gz" img = nb.load(fname) - imgdata = np.asanyarray(img.dataobj, dtype=img.get_data_dtype()) + imgdata = np.asanyarray(img.dataobj, dtype=get_obj_dtype(img.dataobj)) # Test identity transform - setting reference xfm = TransformBase() @@ -111,7 +113,8 @@ def _to_hdf5(klass, x5_root): xfm = nitl.Affine() xfm.reference = fname moved = apply(xfm, fname, order=0) - assert np.all(imgdata == np.asanyarray(moved.dataobj, dtype=moved.get_data_dtype())) + + assert np.all(imgdata == np.asanyarray(moved.dataobj, dtype=get_obj_dtype(moved.dataobj))) # Test ndim returned by affine assert nitl.Affine().ndim == 3 From 015347272558798f53b627d3b93e40159adba7b9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 07:25:47 +0200 Subject: [PATCH 111/123] fix: ensure ``__len__`` is defined for all transforms`` --- nitransforms/base.py | 10 ++++++++++ nitransforms/nonlinear.py | 4 ---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index 81ed1a5e..a40998c5 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -279,6 +279,16 @@ def __add__(self, b): return TransformChain(transforms=[self, b]) + def __len__(self): + """ + Enable ``len()``. + + By default, all transforms are of length one. + This must be overriden by transforms arrays and chains. + + """ + return 1 + @property def reference(self): """Access a reference space where data will be resampled onto.""" diff --git a/nitransforms/nonlinear.py b/nitransforms/nonlinear.py index ced348a2..9c29c53c 100644 --- a/nitransforms/nonlinear.py +++ b/nitransforms/nonlinear.py @@ -94,10 +94,6 @@ def __repr__(self): """Beautify the python representation.""" return f"<{self.__class__.__name__}[{self._field.shape[-1]}D] {self._field.shape[:3]}>" - def __len__(self): - """Enable len() -- for compatibility, only len == 1 is supported.""" - return 1 - @property def ndim(self): """Get the dimensions of the transform.""" From 85d03b426d9987ceec535a337a647d719a77d298 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 07:55:07 +0200 Subject: [PATCH 112/123] fix: clarify what is a 3D transform chain and a 4D transform 3D transform chains resulting of composing several transformations (e.g., affine and deformation fields in spatial normalization) should not be split into its components. This is in contrast to lists of 3D transforms such as head-motion correcting affines, where each applies to one timepoint. 
These should be considered 4D and in some future they may integrate slice timing correction in them. --- nitransforms/resampling.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index eb3f9ad0..e2de9a2c 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -98,9 +98,10 @@ def apply( # Avoid opening the data array just yet input_dtype = get_obj_dtype(spatialimage.dataobj) - # Number of transformations + # Number of data volumes data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1] - xfm_nvols = len(transform) + # Number of transforms: transforms chains (e.g., affine + field, are a single transform) + xfm_nvols = 1 if transform.ndim < 4 else len(transform) if data_nvols != xfm_nvols and min(data_nvols, xfm_nvols) > 1: raise ValueError( From 06a1c01ba8492c9b0e6f08fe4e0f7758075c790f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 08:43:59 +0200 Subject: [PATCH 113/123] maint: reorganize tests around the spun-off apply --- nitransforms/tests/test_linear.py | 146 ----------- nitransforms/tests/test_manip.py | 54 +--- nitransforms/tests/test_nonlinear.py | 142 ----------- nitransforms/tests/test_resampling.py | 352 ++++++++++++++++++++++++++ 4 files changed, 353 insertions(+), 341 deletions(-) create mode 100644 nitransforms/tests/test_resampling.py diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index 50cc5371..5746d5f7 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -4,8 +4,6 @@ import os import pytest import numpy as np -from subprocess import check_call -import shutil import h5py import nibabel as nb @@ -13,28 +11,8 @@ from nibabel.affines import from_matvec from nitransforms import linear as nitl from nitransforms import io -from nitransforms.resampling import apply from .utils import assert_affines_by_filename -RMSE_TOL = 0.1 -APPLY_LINEAR_CMD = { - "fsl": """\ -flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \ --applyxfm -init {transform} -out {resampled}\ -""".format, - "itk": """\ -antsApplyTransforms -d 3 -r {reference} -i {moving} \ --o {resampled} -n NearestNeighbor -t {transform} --float\ -""".format, - "afni": """\ -3dAllineate -base {reference} -input {moving} \ --prefix {resampled} -1Dmatrix_apply {transform} -final NN\ -""".format, - "fs": """\ -mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \ ---o {resampled} --nearest""".format, -} - @pytest.mark.parametrize("matrix", [[0.0], np.ones((3, 3, 3)), np.ones((3, 4)), ]) def test_linear_typeerrors1(matrix): @@ -234,96 +212,6 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool assert_affines_by_filename(xfm_fname1, xfm_fname2) -@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", 'oblique', ]) -@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"]) -def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orientation, sw_tool): - """Check implementation of exporting affines to formats.""" - tmpdir.chdir() - - img = get_testdata[image_orientation] - msk = get_testmask[image_orientation] - - # Generate test transform - T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) - xfm = nitl.Affine(T) - xfm.reference = img - - ext = "" - if sw_tool == "itk": - ext = ".tfm" - elif sw_tool == "fs": - ext = ".lta" - - img.to_filename("img.nii.gz") - msk.to_filename("mask.nii.gz") - - # Write out 
transform file (software-dependent) - xfm_fname = f"M.{sw_tool}{ext}" - # Change reference dataset for AFNI & oblique - if (sw_tool, image_orientation) == ("afni", "oblique"): - io.afni.AFNILinearTransform.from_ras( - T, - moving=img, - reference=img, - ).to_filename(xfm_fname) - else: - xfm.to_filename(xfm_fname, fmt=sw_tool) - - cmd = APPLY_LINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=os.path.abspath("mask.nii.gz"), - moving=os.path.abspath("mask.nii.gz"), - resampled=os.path.abspath("resampled_brainmask.nii.gz"), - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - # resample mask - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - - nt_moved_mask = apply(xfm, msk, order=0) - nt_moved_mask.set_data_dtype(msk.get_data_dtype()) - nt_moved_mask.to_filename("ntmask.nii.gz") - diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL - brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) - - cmd = APPLY_LINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=os.path.abspath("img.nii.gz"), - moving=os.path.abspath("img.nii.gz"), - resampled=os.path.abspath("resampled.nii.gz"), - ) - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - sw_moved.set_data_dtype(img.get_data_dtype()) - - nt_moved = apply(xfm, img, order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) - - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - nt_moved = apply(xfm, "img.nii.gz", order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - def test_Affine_to_x5(tmpdir, testdata_path): """Test affine's operations.""" tmpdir.chdir() @@ -336,40 +224,6 @@ def test_Affine_to_x5(tmpdir, testdata_path): aff._to_hdf5(f.create_group("Affine")) -def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path): - """Apply transform mappings.""" - hmc = nitl.load( - data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz" - ) - assert isinstance(hmc, nitl.LinearTransformsMapping) - - # Test-case: realign functional data on to sbref - nii = apply( - hmc, testdata_path / "func.nii.gz", order=1, reference=testdata_path / "sbref.nii.gz" - ) - assert nii.dataobj.shape[-1] == len(hmc) - - # Test-case: write out a fieldmap moved with head - hmcinv = nitl.LinearTransformsMapping( - np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" - ) - - nii = apply( - hmcinv, testdata_path / "fmap.nii.gz", order=1 - ) - assert nii.dataobj.shape[-1] == len(hmc) - - # Ensure a ValueError is issued when trying to do weird stuff - hmc = nitl.LinearTransformsMapping(hmc.matrix[:1, ...]) - with pytest.raises(ValueError): - apply( - hmc, - testdata_path / "func.nii.gz", - order=1, - reference=testdata_path / "sbref.nii.gz", - ) - - def test_mulmat_operator(testdata_path): """Check the @ operator.""" ref = testdata_path / 
"someones_anatomy.nii.gz" diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py index b7f6a6e4..2a2d6ffb 100644 --- a/nitransforms/tests/test_manip.py +++ b/nitransforms/tests/test_manip.py @@ -1,67 +1,15 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of nonlinear transforms.""" -import os -import shutil -from subprocess import check_call import pytest import numpy as np -import nibabel as nb -from ..manip import load as _load, TransformChain +from ..manip import TransformChain from ..linear import Affine -from .test_nonlinear import ( - RMSE_TOL, - APPLY_NONLINEAR_CMD, -) -from nitransforms.resampling import apply FMT = {"lta": "fs", "tfm": "itk"} -def test_itk_h5(tmp_path, testdata_path): - """Check a translation-only field on one or more axes, different image orientations.""" - os.chdir(str(tmp_path)) - img_fname = testdata_path / "T1w_scanner.nii.gz" - xfm_fname = ( - testdata_path - / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5" - ) - - xfm = _load(xfm_fname) - - assert len(xfm) == 2 - - ref_fname = tmp_path / "reference.nii.gz" - nb.Nifti1Image( - np.zeros(xfm.reference.shape, dtype="uint16"), xfm.reference.affine, - ).to_filename(str(ref_fname)) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD["itk"]( - transform=xfm_fname, - reference=ref_fname, - moving=img_fname, - output="resampled.nii.gz", - extra="", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - - nt_moved = apply(xfm, img_fname, order=0) - nt_moved.to_filename("nt_resampled.nii.gz") - diff = sw_moved.get_fdata() - nt_moved.get_fdata() - # A certain tolerance is necessary because of resampling at borders - assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL - - @pytest.mark.parametrize("ext0", ["lta", "tfm"]) @pytest.mark.parametrize("ext1", ["lta", "tfm"]) @pytest.mark.parametrize("ext2", ["lta", "tfm"]) diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 24d1f83e..43b4584f 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -19,22 +19,6 @@ from ..io.itk import ITKDisplacementsField -RMSE_TOL = 0.05 -APPLY_NONLINEAR_CMD = { - "itk": """\ -antsApplyTransforms -d 3 -r {reference} -i {moving} \ --o {output} -n NearestNeighbor -t {transform} {extra}\ -""".format, - "afni": """\ -3dNwarpApply -nwarp {transform} -source {moving} \ --master {reference} -interp NN -prefix {output} {extra}\ -""".format, - "fsl": """\ -applywarp -i {moving} -r {reference} -o {output} {extra}\ --w {transform} --interp=nn""".format, -} - - @pytest.mark.parametrize("size", [(20, 20, 20), (20, 20, 20, 3)]) def test_itk_disp_load(size): """Checks field sizes.""" @@ -113,132 +97,6 @@ def test_bsplines_references(testdata_path): ) -@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"]) -@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) -@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)]) -def test_displacements_field1( - tmp_path, - get_testdata, - get_testmask, - image_orientation, - sw_tool, - axis, -): - """Check a translation-only field on one or more axes, different image orientations.""" - if (image_orientation, sw_tool) == 
("oblique", "afni"): - pytest.skip("AFNI obliques are not yet implemented for displacements fields") - - os.chdir(str(tmp_path)) - nii = get_testdata[image_orientation] - msk = get_testmask[image_orientation] - nii.to_filename("reference.nii.gz") - msk.to_filename("mask.nii.gz") - - fieldmap = np.zeros( - (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3), - dtype="float32", - ) - fieldmap[..., axis] = -10.0 - - _hdr = nii.header.copy() - if sw_tool in ("itk",): - _hdr.set_intent("vector") - _hdr.set_data_dtype("float32") - - xfm_fname = "warp.nii.gz" - field = nb.Nifti1Image(fieldmap, nii.affine, _hdr) - field.to_filename(xfm_fname) - - xfm = nlload(xfm_fname, fmt=sw_tool) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=tmp_path / "mask.nii.gz", - moving=tmp_path / "mask.nii.gz", - output=tmp_path / "resampled_brainmask.nii.gz", - extra="--output-data-type uchar" if sw_tool == "itk" else "", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - # resample mask - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved_mask = nb.load("resampled_brainmask.nii.gz") - nt_moved_mask = apply(xfm, msk, order=0) - nt_moved_mask.set_data_dtype(msk.get_data_dtype()) - diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - - assert np.sqrt((diff**2).mean()) < RMSE_TOL - brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD[sw_tool]( - transform=os.path.abspath(xfm_fname), - reference=tmp_path / "reference.nii.gz", - moving=tmp_path / "reference.nii.gz", - output=tmp_path / "resampled.nii.gz", - extra="--output-data-type uchar" if sw_tool == "itk" else "", - ) - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - - nt_moved = apply(xfm, nii, order=0) - nt_moved.set_data_dtype(nii.get_data_dtype()) - nt_moved.to_filename("nt_resampled.nii.gz") - sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = np.asanyarray( - sw_moved.dataobj, dtype=sw_moved.get_data_dtype() - ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL - - -@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) -def test_displacements_field2(tmp_path, testdata_path, sw_tool): - """Check a translation-only field on one or more axes, different image orientations.""" - os.chdir(str(tmp_path)) - img_fname = testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz" - xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format( - sw_tool - ) - - xfm = nlload(xfm_fname, fmt=sw_tool) - - # Then apply the transform and cross-check with software - cmd = APPLY_NONLINEAR_CMD[sw_tool]( - transform=xfm_fname, - reference=img_fname, - moving=img_fname, - output="resampled.nii.gz", - extra="", - ) - - # skip test if command is not available on host - exe = cmd.split(" ", 1)[0] - if not shutil.which(exe): - pytest.skip(f"Command {exe} not found on host") - - exit_code = check_call([cmd], shell=True) - assert exit_code == 0 - sw_moved = nb.load("resampled.nii.gz") - - nt_moved = apply(xfm, img_fname, order=0) - nt_moved.to_filename("nt_resampled.nii.gz") - 
sw_moved.set_data_dtype(nt_moved.get_data_dtype()) - diff = np.asanyarray( - sw_moved.dataobj, dtype=sw_moved.get_data_dtype() - ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - # A certain tolerance is necessary because of resampling at borders - assert np.sqrt((diff**2).mean()) < RMSE_TOL - - def test_bspline(tmp_path, testdata_path): """Cross-check B-Splines and deformation field.""" os.chdir(str(tmp_path)) diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py new file mode 100644 index 00000000..3dd9aff4 --- /dev/null +++ b/nitransforms/tests/test_resampling.py @@ -0,0 +1,352 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Exercise the standalone ``apply()`` implementation.""" +import os +import pytest +import numpy as np +from subprocess import check_call +import shutil + +import nibabel as nb +from nibabel.eulerangles import euler2mat +from nibabel.affines import from_matvec +from nitransforms import linear as nitl +from nitransforms import nonlinear as nitnl +from nitransforms import manip as nitm +from nitransforms import io +from nitransforms.resampling import apply + +RMSE_TOL_LINEAR = 0.09 +RMSE_TOL_NONLINEAR = 0.05 +APPLY_LINEAR_CMD = { + "fsl": """\ +flirt -setbackground 0 -interp nearestneighbour -in {moving} -ref {reference} \ +-applyxfm -init {transform} -out {resampled}\ +""".format, + "itk": """\ +antsApplyTransforms -d 3 -r {reference} -i {moving} \ +-o {resampled} -n NearestNeighbor -t {transform} --float\ +""".format, + "afni": """\ +3dAllineate -base {reference} -input {moving} \ +-prefix {resampled} -1Dmatrix_apply {transform} -final NN\ +""".format, + "fs": """\ +mri_vol2vol --mov {moving} --targ {reference} --lta {transform} \ +--o {resampled} --nearest""".format, +} +APPLY_NONLINEAR_CMD = { + "itk": """\ +antsApplyTransforms -d 3 -r {reference} -i {moving} \ +-o {output} -n NearestNeighbor -t {transform} {extra}\ +""".format, + "afni": """\ +3dNwarpApply -nwarp {transform} -source {moving} \ +-master {reference} -interp NN -prefix {output} {extra}\ +""".format, + "fsl": """\ +applywarp -i {moving} -r {reference} -o {output} {extra}\ +-w {transform} --interp=nn""".format, +} + + +@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", 'oblique', ]) +@pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"]) +def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orientation, sw_tool): + """Check implementation of exporting affines to formats.""" + tmpdir.chdir() + + img = get_testdata[image_orientation] + msk = get_testmask[image_orientation] + + # Generate test transform + T = from_matvec(euler2mat(x=0.9, y=0.001, z=0.001), [4.0, 2.0, -1.0]) + xfm = nitl.Affine(T) + xfm.reference = img + + ext = "" + if sw_tool == "itk": + ext = ".tfm" + elif sw_tool == "fs": + ext = ".lta" + + img.to_filename("img.nii.gz") + msk.to_filename("mask.nii.gz") + + # Write out transform file (software-dependent) + xfm_fname = f"M.{sw_tool}{ext}" + # Change reference dataset for AFNI & oblique + if (sw_tool, image_orientation) == ("afni", "oblique"): + io.afni.AFNILinearTransform.from_ras( + T, + moving=img, + reference=img, + ).to_filename(xfm_fname) + else: + xfm.to_filename(xfm_fname, fmt=sw_tool) + + cmd = APPLY_LINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=os.path.abspath("mask.nii.gz"), + moving=os.path.abspath("mask.nii.gz"), + resampled=os.path.abspath("resampled_brainmask.nii.gz"), 
+ ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + # resample mask + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved_mask = nb.load("resampled_brainmask.nii.gz") + + nt_moved_mask = apply(xfm, msk, order=0) + nt_moved_mask.set_data_dtype(msk.get_data_dtype()) + nt_moved_mask.to_filename("ntmask.nii.gz") + diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) + + assert np.sqrt((diff ** 2).mean()) < RMSE_TOL_LINEAR + brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) + + cmd = APPLY_LINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=os.path.abspath("img.nii.gz"), + moving=os.path.abspath("img.nii.gz"), + resampled=os.path.abspath("resampled.nii.gz"), + ) + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + sw_moved.set_data_dtype(img.get_data_dtype()) + + nt_moved = apply(xfm, img, order=0) + diff = ( + np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) + - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + ) + + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + nt_moved = apply(xfm, "img.nii.gz", order=0) + diff = ( + np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) + - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + ) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", "oblique"]) +@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) +@pytest.mark.parametrize("axis", [0, 1, 2, (0, 1), (1, 2), (0, 1, 2)]) +def test_displacements_field1( + tmp_path, + get_testdata, + get_testmask, + image_orientation, + sw_tool, + axis, +): + """Check a translation-only field on one or more axes, different image orientations.""" + if (image_orientation, sw_tool) == ("oblique", "afni"): + pytest.skip("AFNI obliques are not yet implemented for displacements fields") + + os.chdir(str(tmp_path)) + nii = get_testdata[image_orientation] + msk = get_testmask[image_orientation] + nii.to_filename("reference.nii.gz") + msk.to_filename("mask.nii.gz") + + fieldmap = np.zeros( + (*nii.shape[:3], 1, 3) if sw_tool != "fsl" else (*nii.shape[:3], 3), + dtype="float32", + ) + fieldmap[..., axis] = -10.0 + + _hdr = nii.header.copy() + if sw_tool in ("itk",): + _hdr.set_intent("vector") + _hdr.set_data_dtype("float32") + + xfm_fname = "warp.nii.gz" + field = nb.Nifti1Image(fieldmap, nii.affine, _hdr) + field.to_filename(xfm_fname) + + xfm = nitnl.load(xfm_fname, fmt=sw_tool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=tmp_path / "mask.nii.gz", + moving=tmp_path / "mask.nii.gz", + output=tmp_path / "resampled_brainmask.nii.gz", + extra="--output-data-type uchar" if sw_tool == "itk" else "", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + # resample mask + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved_mask = nb.load("resampled_brainmask.nii.gz") + nt_moved_mask = apply(xfm, msk, order=0) + 
nt_moved_mask.set_data_dtype(msk.get_data_dtype()) + diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) + + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR + brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + transform=os.path.abspath(xfm_fname), + reference=tmp_path / "reference.nii.gz", + moving=tmp_path / "reference.nii.gz", + output=tmp_path / "resampled.nii.gz", + extra="--output-data-type uchar" if sw_tool == "itk" else "", + ) + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = apply(xfm, nii, order=0) + nt_moved.set_data_dtype(nii.get_data_dtype()) + nt_moved.to_filename("nt_resampled.nii.gz") + sw_moved.set_data_dtype(nt_moved.get_data_dtype()) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("sw_tool", ["itk", "afni"]) +def test_displacements_field2(tmp_path, testdata_path, sw_tool): + """Check a translation-only field on one or more axes, different image orientations.""" + os.chdir(str(tmp_path)) + img_fname = testdata_path / "tpl-OASIS30ANTs_T1w.nii.gz" + xfm_fname = testdata_path / "ds-005_sub-01_from-OASIS_to-T1_warp_{}.nii.gz".format( + sw_tool + ) + + xfm = nitnl.load(xfm_fname, fmt=sw_tool) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD[sw_tool]( + transform=xfm_fname, + reference=img_fname, + moving=img_fname, + output="resampled.nii.gz", + extra="", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = apply(xfm, img_fname, order=0) + nt_moved.to_filename("nt_resampled.nii.gz") + sw_moved.set_data_dtype(nt_moved.get_data_dtype()) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) + # A certain tolerance is necessary because of resampling at borders + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR + + +def test_apply_transformchain(tmp_path, testdata_path): + """Check a translation-only field on one or more axes, different image orientations.""" + os.chdir(str(tmp_path)) + img_fname = testdata_path / "T1w_scanner.nii.gz" + xfm_fname = ( + testdata_path + / "ds-005_sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5" + ) + + xfm = nitm.load(xfm_fname) + + assert len(xfm) == 2 + + ref_fname = tmp_path / "reference.nii.gz" + nb.Nifti1Image( + np.zeros(xfm.reference.shape, dtype="uint16"), xfm.reference.affine, + ).to_filename(str(ref_fname)) + + # Then apply the transform and cross-check with software + cmd = APPLY_NONLINEAR_CMD["itk"]( + transform=xfm_fname, + reference=ref_fname, + moving=img_fname, + output="resampled.nii.gz", + extra="", + ) + + # skip test if command is not available on host + exe = cmd.split(" ", 1)[0] + if not shutil.which(exe): + pytest.skip(f"Command {exe} not found on host") + + exit_code = check_call([cmd], shell=True) + assert exit_code == 0 + sw_moved = nb.load("resampled.nii.gz") + + nt_moved = 
apply(xfm, img_fname, order=0) + nt_moved.to_filename("nt_resampled.nii.gz") + diff = sw_moved.get_fdata() - nt_moved.get_fdata() + # A certain tolerance is necessary because of resampling at borders + assert (np.abs(diff) > 1e-3).sum() / diff.size < RMSE_TOL_LINEAR + + +@pytest.mark.parametrize("serialize_4d", [True, False]) +def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path, serialize_4d): + """Apply transform mappings.""" + hmc = nitl.load( + data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz" + ) + assert isinstance(hmc, nitl.LinearTransformsMapping) + + # Test-case: realign functional data on to sbref + nii = apply( + hmc, + testdata_path / "func.nii.gz", + order=1, + reference=testdata_path / "sbref.nii.gz", + serialize_nvols=2 if serialize_4d else np.inf, + ) + assert nii.dataobj.shape[-1] == len(hmc) + + # Test-case: write out a fieldmap moved with head + hmcinv = nitl.LinearTransformsMapping( + np.linalg.inv(hmc.matrix), reference=testdata_path / "func.nii.gz" + ) + + nii = apply( + hmcinv, testdata_path / "fmap.nii.gz", + order=1, + serialize_nvols=2 if serialize_4d else np.inf, + ) + assert nii.dataobj.shape[-1] == len(hmc) + + # Ensure a ValueError is issued when trying to apply mismatched transforms + # (e.g., in this case, two transforms while the functional has 8 volumes) + hmc = nitl.LinearTransformsMapping(hmc.matrix[:2, ...]) + with pytest.raises(ValueError): + apply( + hmc, + testdata_path / "func.nii.gz", + order=1, + reference=testdata_path / "sbref.nii.gz", + serialize_nvols=2 if serialize_4d else np.inf, + ) From 8dd883dcd6fb91bd2dfc101620e9711301e3dc5f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 09:15:44 +0200 Subject: [PATCH 114/123] sty: format changed files --- nitransforms/base.py | 17 ++++++----- nitransforms/resampling.py | 6 +++- nitransforms/tests/test_base.py | 10 +++++-- nitransforms/tests/test_linear.py | 41 +++++++++++++++++++-------- nitransforms/tests/test_manip.py | 1 + nitransforms/tests/test_nonlinear.py | 4 +-- nitransforms/tests/test_resampling.py | 41 ++++++++++++++++++--------- 7 files changed, 79 insertions(+), 41 deletions(-) diff --git a/nitransforms/base.py b/nitransforms/base.py index a40998c5..67acc073 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -7,6 +7,7 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Common interface for transforms.""" + from pathlib import Path import numpy as np import h5py @@ -146,13 +147,13 @@ def from_arrays(cls, coordinates, triangles): darrays = [ nb.gifti.GiftiDataArray( coordinates.astype(np.float32), - intent=nb.nifti1.intent_codes['NIFTI_INTENT_POINTSET'], - datatype=nb.nifti1.data_type_codes['NIFTI_TYPE_FLOAT32'], + intent=nb.nifti1.intent_codes["NIFTI_INTENT_POINTSET"], + datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_FLOAT32"], ), nb.gifti.GiftiDataArray( triangles.astype(np.int32), - intent=nb.nifti1.intent_codes['NIFTI_INTENT_TRIANGLE'], - datatype=nb.nifti1.data_type_codes['NIFTI_TYPE_INT32'], + intent=nb.nifti1.intent_codes["NIFTI_INTENT_TRIANGLE"], + datatype=nb.nifti1.data_type_codes["NIFTI_TYPE_INT32"], ), ] gii = nb.gifti.GiftiImage(darrays=darrays) @@ -282,7 +283,7 @@ def __add__(self, b): def __len__(self): """ Enable ``len()``. - + By default, all transforms are of length one. This must be overriden by transforms arrays and chains. @@ -345,10 +346,8 @@ def apply(self, *args, **kwargs): Deprecated. Please use ``nitransforms.resampling.apply`` instead. 
""" - message = ( - "The `apply` method is deprecated. Please use `nitransforms.resampling.apply` instead." - ) - warnings.warn(message, DeprecationWarning, stacklevel=2) + _msg = "This method is deprecated. Please use `nitransforms.resampling.apply` instead." + warnings.warn(_msg, DeprecationWarning, stacklevel=2) from .resampling import apply return apply(self, *args, **kwargs) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index e2de9a2c..abfe2b71 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -188,7 +188,11 @@ def apply( ) if isinstance(_ref, ImageGrid): # If reference is grid, reshape - hdr = _ref.header.copy() if _ref.header is not None else spatialimage.header.__class__() + hdr = ( + _ref.header.copy() + if _ref.header is not None + else spatialimage.header.__class__() + ) hdr.set_data_dtype(output_dtype or spatialimage.header.get_data_dtype()) moved = spatialimage.__class__( diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index c85ac2e2..4bb147fd 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -1,4 +1,5 @@ """Tests of the base module.""" + import numpy as np import nibabel as nb from nibabel.arrayproxy import get_obj_dtype @@ -114,7 +115,9 @@ def _to_hdf5(klass, x5_root): xfm.reference = fname moved = apply(xfm, fname, order=0) - assert np.all(imgdata == np.asanyarray(moved.dataobj, dtype=get_obj_dtype(moved.dataobj))) + assert np.all( + imgdata == np.asanyarray(moved.dataobj, dtype=get_obj_dtype(moved.dataobj)) + ) # Test ndim returned by affine assert nitl.Affine().ndim == 3 @@ -168,7 +171,10 @@ def test_concatenation(testdata_path): def test_SurfaceMesh(testdata_path): surf_path = testdata_path / "sub-200148_hemi-R_pial.surf.gii" - shape_path = testdata_path / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + shape_path = ( + testdata_path + / "sub-sid000005_ses-budapest_acq-MPRAGE_hemi-R_thickness.shape.gii" + ) img_path = testdata_path / "bold.nii.gz" mesh = SurfaceMesh(nb.load(surf_path)) diff --git a/nitransforms/tests/test_linear.py b/nitransforms/tests/test_linear.py index 5746d5f7..969b33ab 100644 --- a/nitransforms/tests/test_linear.py +++ b/nitransforms/tests/test_linear.py @@ -1,12 +1,11 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of linear transforms.""" -import os + import pytest import numpy as np import h5py -import nibabel as nb from nibabel.eulerangles import euler2mat from nibabel.affines import from_matvec from nitransforms import linear as nitl @@ -14,7 +13,14 @@ from .utils import assert_affines_by_filename -@pytest.mark.parametrize("matrix", [[0.0], np.ones((3, 3, 3)), np.ones((3, 4)), ]) +@pytest.mark.parametrize( + "matrix", + [ + [0.0], + np.ones((3, 3, 3)), + np.ones((3, 4)), + ], +) def test_linear_typeerrors1(matrix): """Exercise errors in Affine creation.""" with pytest.raises(TypeError): @@ -136,7 +142,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, ) else: assert xfm == nitl.load(fname, fmt=supplied_fmt, reference=ref_file) @@ -146,7 +154,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, 
reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -160,7 +170,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -170,7 +182,9 @@ def test_loadsave(tmp_path, data_path, testdata_path, autofmt, fmt): if fmt == "fsl": assert np.allclose( xfm.matrix, - nitl.load(fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file).matrix, + nitl.load( + fname, fmt=supplied_fmt, reference=ref_file, moving=ref_file + ).matrix, rtol=1e-2, # FSL incurs into large errors due to rounding ) else: @@ -190,12 +204,15 @@ def test_linear_save(tmpdir, data_path, get_testdata, image_orientation, sw_tool T = np.linalg.inv(T) xfm = ( - nitl.Affine(T) if (sw_tool, image_orientation) != ("afni", "oblique") else + nitl.Affine(T) + if (sw_tool, image_orientation) != ("afni", "oblique") # AFNI is special when moving or reference are oblique - let io do the magic - nitl.Affine(io.afni.AFNILinearTransform.from_ras(T).to_ras( - reference=img, - moving=img, - )) + else nitl.Affine( + io.afni.AFNILinearTransform.from_ras(T).to_ras( + reference=img, + moving=img, + ) + ) ) xfm.reference = img diff --git a/nitransforms/tests/test_manip.py b/nitransforms/tests/test_manip.py index 2a2d6ffb..b5dd5c62 100644 --- a/nitransforms/tests/test_manip.py +++ b/nitransforms/tests/test_manip.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of nonlinear transforms.""" + import pytest import numpy as np diff --git a/nitransforms/tests/test_nonlinear.py b/nitransforms/tests/test_nonlinear.py index 43b4584f..6112f633 100644 --- a/nitransforms/tests/test_nonlinear.py +++ b/nitransforms/tests/test_nonlinear.py @@ -1,9 +1,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests of nonlinear transforms.""" + import os -import shutil -from subprocess import check_call import pytest import numpy as np @@ -14,7 +13,6 @@ from nitransforms.nonlinear import ( BSplineFieldTransform, DenseFieldTransform, - load as nlload, ) from ..io.itk import ITKDisplacementsField diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py index 3dd9aff4..2384ad97 100644 --- a/nitransforms/tests/test_resampling.py +++ b/nitransforms/tests/test_resampling.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Exercise the standalone ``apply()`` implementation.""" + import os import pytest import numpy as np @@ -50,9 +51,19 @@ } -@pytest.mark.parametrize("image_orientation", ["RAS", "LAS", "LPS", 'oblique', ]) +@pytest.mark.parametrize( + "image_orientation", + [ + "RAS", + "LAS", + "LPS", + "oblique", + ], +) @pytest.mark.parametrize("sw_tool", ["itk", "fsl", "afni", "fs"]) -def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orientation, sw_tool): +def test_apply_linear_transform( + tmpdir, get_testdata, get_testmask, image_orientation, sw_tool +): """Check implementation of exporting affines to formats.""" tmpdir.chdir() @@ 
-107,7 +118,7 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient nt_moved_mask.to_filename("ntmask.nii.gz") diff = np.asanyarray(sw_moved_mask.dataobj) - np.asanyarray(nt_moved_mask.dataobj) - assert np.sqrt((diff ** 2).mean()) < RMSE_TOL_LINEAR + assert np.sqrt((diff**2).mean()) < RMSE_TOL_LINEAR brainmask = np.asanyarray(nt_moved_mask.dataobj, dtype=bool) cmd = APPLY_LINEAR_CMD[sw_tool]( @@ -123,19 +134,17 @@ def test_apply_linear_transform(tmpdir, get_testdata, get_testmask, image_orient sw_moved.set_data_dtype(img.get_data_dtype()) nt_moved = apply(xfm, img, order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) # A certain tolerance is necessary because of resampling at borders assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR nt_moved = apply(xfm, "img.nii.gz", order=0) - diff = ( - np.asanyarray(sw_moved.dataobj, dtype=sw_moved.get_data_dtype()) - - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) - ) + diff = np.asanyarray( + sw_moved.dataobj, dtype=sw_moved.get_data_dtype() + ) - np.asanyarray(nt_moved.dataobj, dtype=nt_moved.get_data_dtype()) # A certain tolerance is necessary because of resampling at borders assert np.sqrt((diff[brainmask] ** 2).mean()) < RMSE_TOL_LINEAR @@ -281,7 +290,8 @@ def test_apply_transformchain(tmp_path, testdata_path): ref_fname = tmp_path / "reference.nii.gz" nb.Nifti1Image( - np.zeros(xfm.reference.shape, dtype="uint16"), xfm.reference.affine, + np.zeros(xfm.reference.shape, dtype="uint16"), + xfm.reference.affine, ).to_filename(str(ref_fname)) # Then apply the transform and cross-check with software @@ -310,7 +320,9 @@ def test_apply_transformchain(tmp_path, testdata_path): @pytest.mark.parametrize("serialize_4d", [True, False]) -def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path, serialize_4d): +def test_LinearTransformsMapping_apply( + tmp_path, data_path, testdata_path, serialize_4d +): """Apply transform mappings.""" hmc = nitl.load( data_path / "hmc-itk.tfm", fmt="itk", reference=testdata_path / "sbref.nii.gz" @@ -333,7 +345,8 @@ def test_LinearTransformsMapping_apply(tmp_path, data_path, testdata_path, seria ) nii = apply( - hmcinv, testdata_path / "fmap.nii.gz", + hmcinv, + testdata_path / "fmap.nii.gz", order=1, serialize_nvols=2 if serialize_4d else np.inf, ) From bf10842df407195e95c5895aa230bfb66d6a79c7 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 10:40:59 +0200 Subject: [PATCH 115/123] maint: update ANTs' pinnings, which are failing again after ants-2.5.3 was released Before 2.5.3, conda would install the latest ITK (5.4 at the time of writing) but ants was linked against 5.3 and didn't pin that dependency. In 2.5.3, ants was built and linked against 5.4 and I'm unaware of whether they properly pinned the dependency. Either way, this commit fixes both to 2.5.3 and 5.4. 
--- env.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/env.yml b/env.yml index 55ff2114..d550959b 100644 --- a/env.yml +++ b/env.yml @@ -12,8 +12,8 @@ dependencies: - mkl-service=2.4.0 # git-annex for templateflow users with DataLad superdatasets - git-annex=*=alldep* - # ANTs is linked against libitk 5.3 but does not pin the version - - libitk=5.3 + # ANTs 2.5.3 is linked against libitk 5.4 - let's pin both there + - libitk=5.4 # Base scientific python stack; required by FSL, so pinned here - numpy=1.26 - scipy=1.11 @@ -28,7 +28,7 @@ dependencies: - graphviz=9.0 - pandoc=3.1 # Workflow dependencies: ANTs - - ants=2.5 + - ants=2.5.3 # Workflow dependencies: FSL (versions pinned in 6.0.7.7) - fsl-bet2=2111.4 - fsl-flirt=2111.2 From 4c06174544f4410661d59ab7b6af3e2e689916b9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 1 Aug 2024 08:49:56 +0200 Subject: [PATCH 116/123] enh: expand test coverage --- nitransforms/base.py | 6 ++++++ nitransforms/tests/test_base.py | 15 +++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/nitransforms/base.py b/nitransforms/base.py index 67acc073..fa05f1f6 100644 --- a/nitransforms/base.py +++ b/nitransforms/base.py @@ -287,6 +287,12 @@ def __len__(self): By default, all transforms are of length one. This must be overriden by transforms arrays and chains. + Example + ------- + >>> T1 = TransformBase() + >>> len(T1) + 1 + """ return 1 diff --git a/nitransforms/tests/test_base.py b/nitransforms/tests/test_base.py index 4bb147fd..49d7f7af 100644 --- a/nitransforms/tests/test_base.py +++ b/nitransforms/tests/test_base.py @@ -198,3 +198,18 @@ def test_SurfaceMesh(testdata_path): with pytest.raises(TypeError): SurfaceMesh(nb.load(shape_path)) + + +def test_apply_deprecation(monkeypatch): + """Make sure a deprecation warning is issued.""" + from nitransforms import resampling + + def _retval(*args, **kwargs): + return 1 + + monkeypatch.setattr(resampling, "apply", _retval) + + with pytest.deprecated_call(): + retval = TransformBase().apply() + + assert retval == 1 From 754785f18ea57275e21c197529068e72852d7647 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 11:38:07 +0200 Subject: [PATCH 117/123] enh: prepare code for easy parallelization with a process pool executor Resolves: #214. 
--- nitransforms/resampling.py | 41 +++++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 18 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index abfe2b71..bb1bb309 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -8,6 +8,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" +from functools import partial from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload @@ -135,33 +136,37 @@ def apply( else None ) - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" + map_coordinates = partial( + ndi.map_coordinates, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, ) - for t in range(n_resamplings): - xfm_t = transform if n_resamplings == 1 else transform[t] + def _apply_volume(index, data, transform, targets=None): + xfm_t = transform if n_resamplings == 1 else transform[index] if targets is None: targets = ImageGrid(spatialimage).index( # data should be an image _as_homogeneous(xfm_t.map(ref_ndcoords), dim=_ref.ndim) ) - # Interpolate - resampled[..., t] = ndi.map_coordinates( - ( - data - if data is not None - else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) - ), - targets, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, + data_t = ( + data + if data is not None + else spatialimage.dataobj[..., index].astype(input_dtype, copy=False) ) + return map_coordinates(data_t, targets) + + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" + ) + for t in range(n_resamplings): + # Interpolate + resampled[..., t] = _apply_volume(t, data, transform, targets=targets) else: data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype) From 38bb388374fcb900cde1ff966e58cad66658ff0d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 31 Jul 2024 11:44:17 +0200 Subject: [PATCH 118/123] enh: create process pool --- nitransforms/resampling.py | 83 +++++++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 27 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index bb1bb309..1b76dba1 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -8,7 +8,8 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" -from functools import partial +from os import cpu_count +from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path import numpy as np from nibabel.loadsave import load as _nbload @@ -26,6 +27,25 @@ """Minimum number of volumes to automatically serialize 4D transforms.""" +def _apply_volume( + index, + data, + targets, + order=3, + mode="constant", + cval=0.0, + prefilter=True, +): + return index, ndi.map_coordinates( + data, + targets, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + + def apply( transform, spatialimage, @@ -136,38 +156,47 @@ def apply( else None ) - map_coordinates = partial( - ndi.map_coordinates, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) + if njobs is None: + njobs = cpu_count() + + with ProcessPoolExecutor(max_workers=min(njobs, n_resamplings)) as executor: + results = 
[] + for t in range(n_resamplings): + xfm_t = transform if n_resamplings == 1 else transform[t] - def _apply_volume(index, data, transform, targets=None): - xfm_t = transform if n_resamplings == 1 else transform[index] + if targets is None: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(xfm_t.map(ref_ndcoords), dim=_ref.ndim) + ) - if targets is None: - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(xfm_t.map(ref_ndcoords), dim=_ref.ndim) + data_t = ( + data + if data is not None + else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) ) - data_t = ( - data - if data is not None - else spatialimage.dataobj[..., index].astype(input_dtype, copy=False) - ) - return map_coordinates(data_t, targets) + results.append( + executor.submit( + _apply_volume, + t, + data_t, + targets, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ) + ) - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" - ) - for t in range(n_resamplings): - # Interpolate - resampled[..., t] = _apply_volume(t, data, transform, targets=targets) + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" + ) + for future in as_completed(results): + t, resampled_t = future.result() + resampled[..., t] = resampled_t else: data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype) From 026a10af983eff2ca01752918544d667f8e35ddf Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 1 Aug 2024 09:47:48 +0200 Subject: [PATCH 119/123] enh: expand test coverage --- nitransforms/resampling.py | 3 +-- nitransforms/tests/test_resampling.py | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 1b76dba1..f20b9e2b 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -156,8 +156,7 @@ def apply( else None ) - if njobs is None: - njobs = cpu_count() + njobs = cpu_count() if njobs is None or njobs < 1 else njobs with ProcessPoolExecutor(max_workers=min(njobs, n_resamplings)) as executor: results = [] diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py index 2384ad97..f944b225 100644 --- a/nitransforms/tests/test_resampling.py +++ b/nitransforms/tests/test_resampling.py @@ -15,7 +15,7 @@ from nitransforms import nonlinear as nitnl from nitransforms import manip as nitm from nitransforms import io -from nitransforms.resampling import apply +from nitransforms.resampling import apply, _apply_volume RMSE_TOL_LINEAR = 0.09 RMSE_TOL_NONLINEAR = 0.05 @@ -363,3 +363,16 @@ def test_LinearTransformsMapping_apply( reference=testdata_path / "sbref.nii.gz", serialize_nvols=2 if serialize_4d else np.inf, ) + + +@pytest.mark.parametrize("t", list(range(4))) +def test_apply_helper(monkeypatch, t): + """Ensure the apply helper function correctly just decorates with index.""" + from nitransforms.resampling import ndi + + def _retval(*args, **kwargs): + return 1 + + monkeypatch.setattr(ndi, "map_coordinates", _retval) + + assert _apply_volume(t, None, None) == (t, 1) From 7dcc78daa0a172f276c55f38e935ef3c2df87089 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 1 Aug 2024 10:21:11 +0200 Subject: 
[PATCH 120/123] sty: add type annotations --- nitransforms/resampling.py | 96 +++++++++++++++++++++++++++----------- 1 file changed, 68 insertions(+), 28 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index f20b9e2b..6cbbc1e9 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -11,13 +11,17 @@ from os import cpu_count from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path +from typing import Tuple + import numpy as np from nibabel.loadsave import load as _nbload from nibabel.arrayproxy import get_obj_dtype +from nibabel.spatialimages import SpatialImage from scipy import ndimage as ndi from nitransforms.base import ( ImageGrid, + TransformBase, TransformError, SpatialReference, _as_homogeneous, @@ -28,14 +32,49 @@ def _apply_volume( - index, - data, - targets, - order=3, - mode="constant", - cval=0.0, - prefilter=True, -): + index: int, + data: np.ndarray, + targets: np.ndarray, + order: int = 3, + mode: str = "constant", + cval: float = 0.0, + prefilter: bool = True, +) -> Tuple[int, np.ndarray]: + """ + Decorate :obj:`~scipy.ndimage.map_coordinates` to return an order index for parallelization. + + Parameters + ---------- + index : :obj:`int` + The index of the volume to apply the interpolation to. + data : :obj:`~numpy.ndarray` + The input data array. + targets : :obj:`~numpy.ndarray` + The target coordinates for mapping. + order : :obj:`int`, optional + The order of the spline interpolation, default is 3. + The order has to be in the range 0-5. + mode : :obj:`str`, optional + Determines how the input image is extended when the resamplings overflows + a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``, + or ``'wrap'``. Default is ``'constant'``. + cval : :obj:`float`, optional + Constant value for ``mode='constant'``. Default is 0.0. + prefilter: :obj:`bool`, optional + Determines if the image's data array is prefiltered with + a spline filter before interpolation. The default is ``True``, + which will create a temporary *float64* array of filtered values + if *order > 1*. If setting this to ``False``, the output will be + slightly blurred if *order > 1*, unless the input is prefiltered, + i.e. it is the result of calling the spline filter on the original + input. + + Returns + ------- + (:obj:`int`, :obj:`~numpy.ndarray`) + The index and the array resulting from the interpolation. + + """ return index, ndi.map_coordinates( data, targets, @@ -47,37 +86,38 @@ def _apply_volume( def apply( - transform, - spatialimage, - reference=None, - order=3, - mode="constant", - cval=0.0, - prefilter=True, - output_dtype=None, - serialize_nvols=SERIALIZE_VOLUME_WINDOW_WIDTH, - njobs=None, -): + transform: TransformBase, + spatialimage: str | Path | SpatialImage, + reference: str | Path | SpatialImage = None, + order: int = 3, + mode: str = "constant", + cval: float = 0.0, + prefilter: bool = True, + output_dtype: np.dtype = None, + serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, + njobs: int = None, +) -> SpatialImage | np.ndarray: """ Apply a transformation to an image, resampling on the reference spatial object. 
Parameters ---------- - spatialimage : `spatialimage` + spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` The image object containing the data to be resampled in reference space - reference : spatial object, optional + reference : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` The image, surface, or combination thereof containing the coordinates of samples that will be sampled. - order : int, optional + order : :obj:`int`, optional The order of the spline interpolation, default is 3. The order has to be in the range 0-5. - mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional + mode : :obj:`str`, optional Determines how the input image is extended when the resamplings overflows - a border. Default is 'constant'. - cval : float, optional + a border. One of ``'constant'``, ``'reflect'``, ``'nearest'``, ``'mirror'``, + or ``'wrap'``. Default is ``'constant'``. + cval : :obj:`float`, optional Constant value for ``mode='constant'``. Default is 0.0. - prefilter: bool, optional + prefilter: :obj:`bool`, optional Determines if the image's data array is prefiltered with a spline filter before interpolation. The default is ``True``, which will create a temporary *float64* array of filtered values @@ -85,7 +125,7 @@ def apply( slightly blurred if *order > 1*, unless the input is prefiltered, i.e. it is the result of calling the spline filter on the original input. - output_dtype: dtype specifier, optional + output_dtype: :obj:`~numpy.dtype`, optional The dtype of the returned array or image, if specified. If ``None``, the default behavior is to use the effective dtype of the input image. If slope and/or intercept are defined, the effective @@ -97,7 +137,7 @@ def apply( Returns ------- - resampled : `spatialimage` or ndarray + resampled : :obj:`~nibabel.spatialimages.SpatialImage` or :obj:`~numpy.ndarray` The data imaged after resampling to reference space. """ From 79305a9f79010e4a46687e5d8527dde871b7d79d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 2 Aug 2024 09:49:27 +0200 Subject: [PATCH 121/123] enh: implement a memory limitation mechanism in loading data Resolves: #218. Co-authored-by: Chris Markiewicz --- nitransforms/resampling.py | 39 +++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 6cbbc1e9..430abf1a 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -96,6 +96,7 @@ def apply( output_dtype: np.dtype = None, serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, njobs: int = None, + dtype_width: int = 8, ) -> SpatialImage | np.ndarray: """ Apply a transformation to an image, resampling on the reference spatial object. @@ -134,6 +135,10 @@ def apply( If ``reference`` is defined, then the return value is an image, with a data array of the effective dtype but with the on-disk dtype set to the input image's on-disk dtype. + dtype_width: :obj:`int` + Cap the width of the input data type to the given number of bytes. + This argument is intended to work as a way to implement lower memory + requirements in resampling. 
Returns ------- @@ -157,7 +162,7 @@ def apply( spatialimage = _nbload(str(spatialimage)) # Avoid opening the data array just yet - input_dtype = get_obj_dtype(spatialimage.dataobj) + input_dtype = cap_dtype(get_obj_dtype(spatialimage.dataobj), dtype_width) # Number of data volumes data_nvols = 1 if spatialimage.ndim < 4 else spatialimage.shape[-1] @@ -277,3 +282,35 @@ def apply( output_dtype = output_dtype or input_dtype return resampled.astype(output_dtype) + + +def cap_dtype(dt, nbytes): + """ + Cap the datatype size to shave off memory requirements. + + Examples + -------- + >>> cap_dtype(np.dtype('f8'), 4) + dtype('float32') + + >>> cap_dtype(np.dtype('f8'), 16) + dtype('float64') + + >>> cap_dtype('float64', 4) + dtype('float32') + + >>> cap_dtype(np.dtype('i1'), 4) + dtype('int8') + + >>> cap_dtype('int8', 4) + dtype('int8') + + >>> cap_dtype('int32', 1) + dtype('int8') + + >>> cap_dtype(np.dtype('i8'), 4) + dtype('int32') + + """ + dt = np.dtype(dt) + return np.dtype(f"{dt.byteorder}{dt.kind}{min(nbytes, dt.itemsize)}") From 063e1f0c6dd2dd453afb35b76554bebdddd73dd9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 6 Aug 2024 08:57:11 +0200 Subject: [PATCH 122/123] enh: port from process pool into asyncio concurrent Co-authored-by: Chris Markiewicz --- nitransforms/resampling.py | 172 +++++++++++++++++--------- nitransforms/tests/test_resampling.py | 15 +-- 2 files changed, 112 insertions(+), 75 deletions(-) diff --git a/nitransforms/resampling.py b/nitransforms/resampling.py index 430abf1a..d7c7f9c5 100644 --- a/nitransforms/resampling.py +++ b/nitransforms/resampling.py @@ -8,10 +8,11 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Resampling utilities.""" +import asyncio from os import cpu_count -from concurrent.futures import ProcessPoolExecutor, as_completed +from functools import partial from pathlib import Path -from typing import Tuple +from typing import Callable, TypeVar import numpy as np from nibabel.loadsave import load as _nbload @@ -27,30 +28,58 @@ _as_homogeneous, ) +R = TypeVar("R") + SERIALIZE_VOLUME_WINDOW_WIDTH: int = 8 """Minimum number of volumes to automatically serialize 4D transforms.""" -def _apply_volume( - index: int, +async def worker(job: Callable[[], R], semaphore) -> R: + async with semaphore: + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, job) + + +async def _apply_serial( data: np.ndarray, + spatialimage: SpatialImage, targets: np.ndarray, + transform: TransformBase, + ref_ndim: int, + ref_ndcoords: np.ndarray, + n_resamplings: int, + output: np.ndarray, + input_dtype: np.dtype, order: int = 3, mode: str = "constant", cval: float = 0.0, prefilter: bool = True, -) -> Tuple[int, np.ndarray]: + max_concurrent: int = min(cpu_count(), 12), +): """ - Decorate :obj:`~scipy.ndimage.map_coordinates` to return an order index for parallelization. + Resample through a given transform serially, in a 3D+t setting. Parameters ---------- - index : :obj:`int` - The index of the volume to apply the interpolation to. data : :obj:`~numpy.ndarray` The input data array. + spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` + The image object containing the data to be resampled in reference + space targets : :obj:`~numpy.ndarray` The target coordinates for mapping. + transform : :obj:`~nitransforms.base.TransformBase` + The 3D, 3D+t, or 4D transform through which data will be resampled. + ref_ndim : :obj:`int` + Dimensionality of the resampling target (reference image). 
+ ref_ndcoords : :obj:`~numpy.ndarray` + Physical coordinates (RAS+) where data will be interpolated, if the resampling + target is a grid, the scanner coordinates of all voxels. + n_resamplings : :obj:`int` + Total number of 3D resamplings (can be defined by the input image, the transform, + or be matched, that is, same number of volumes in the input and number of transforms). + output : :obj:`~numpy.ndarray` + The output data array where resampled values will be stored volume-by-volume. order : :obj:`int`, optional The order of the spline interpolation, default is 3. The order has to be in the range 0-5. @@ -71,18 +100,46 @@ def _apply_volume( Returns ------- - (:obj:`int`, :obj:`~numpy.ndarray`) - The index and the array resulting from the interpolation. + np.ndarray + Data resampled on the 3D+t array of input coordinates. """ - return index, ndi.map_coordinates( - data, - targets, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) + tasks = [] + semaphore = asyncio.Semaphore(max_concurrent) + + for t in range(n_resamplings): + xfm_t = transform if n_resamplings == 1 else transform[t] + + if targets is None: + targets = ImageGrid(spatialimage).index( # data should be an image + _as_homogeneous(xfm_t.map(ref_ndcoords), dim=ref_ndim) + ) + + data_t = ( + data + if data is not None + else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) + ) + + tasks.append( + asyncio.create_task( + worker( + partial( + ndi.map_coordinates, + data_t, + targets, + output=output[..., t], + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + ), + semaphore, + ) + ) + ) + await asyncio.gather(*tasks) + return output def apply( @@ -94,15 +151,17 @@ def apply( cval: float = 0.0, prefilter: bool = True, output_dtype: np.dtype = None, - serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, - njobs: int = None, dtype_width: int = 8, + serialize_nvols: int = SERIALIZE_VOLUME_WINDOW_WIDTH, + max_concurrent: int = min(cpu_count(), 12), ) -> SpatialImage | np.ndarray: """ Apply a transformation to an image, resampling on the reference spatial object. Parameters ---------- + transform: :obj:`~nitransforms.base.TransformBase` + The 3D, 3D+t, or 4D transform through which data will be resampled. spatialimage : :obj:`~nibabel.spatialimages.SpatialImage` or `os.pathlike` The image object containing the data to be resampled in reference space @@ -118,7 +177,7 @@ def apply( or ``'wrap'``. Default is ``'constant'``. cval : :obj:`float`, optional Constant value for ``mode='constant'``. Default is 0.0. - prefilter: :obj:`bool`, optional + prefilter : :obj:`bool`, optional Determines if the image's data array is prefiltered with a spline filter before interpolation. The default is ``True``, which will create a temporary *float64* array of filtered values @@ -126,7 +185,7 @@ def apply( slightly blurred if *order > 1*, unless the input is prefiltered, i.e. it is the result of calling the spline filter on the original input. - output_dtype: :obj:`~numpy.dtype`, optional + output_dtype : :obj:`~numpy.dtype`, optional The dtype of the returned array or image, if specified. If ``None``, the default behavior is to use the effective dtype of the input image. If slope and/or intercept are defined, the effective @@ -135,10 +194,17 @@ def apply( If ``reference`` is defined, then the return value is an image, with a data array of the effective dtype but with the on-disk dtype set to the input image's on-disk dtype. 
- dtype_width: :obj:`int` + dtype_width : :obj:`int` Cap the width of the input data type to the given number of bytes. This argument is intended to work as a way to implement lower memory requirements in resampling. + serialize_nvols : :obj:`int` + Minimum number of volumes in a 3D+t (that is, a series of 3D transformations + independent in time) to resample on a one-by-one basis. + Serialized resampling can be executed concurrently (parallelized) with + the argument ``max_concurrent``. + max_concurrent : :obj:`int` + Maximum number of 3D resamplings to be executed concurrently. Returns ------- @@ -201,46 +267,30 @@ def apply( else None ) - njobs = cpu_count() if njobs is None or njobs < 1 else njobs - - with ProcessPoolExecutor(max_workers=min(njobs, n_resamplings)) as executor: - results = [] - for t in range(n_resamplings): - xfm_t = transform if n_resamplings == 1 else transform[t] - - if targets is None: - targets = ImageGrid(spatialimage).index( # data should be an image - _as_homogeneous(xfm_t.map(ref_ndcoords), dim=_ref.ndim) - ) - - data_t = ( - data - if data is not None - else spatialimage.dataobj[..., t].astype(input_dtype, copy=False) - ) - - results.append( - executor.submit( - _apply_volume, - t, - data_t, - targets, - order=order, - mode=mode, - cval=cval, - prefilter=prefilter, - ) - ) + # Order F ensures individual volumes are contiguous in memory + # Also matches NIfTI, making final save more efficient + resampled = np.zeros( + (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" + ) - # Order F ensures individual volumes are contiguous in memory - # Also matches NIfTI, making final save more efficient - resampled = np.zeros( - (len(ref_ndcoords), len(transform)), dtype=input_dtype, order="F" + resampled = asyncio.run( + _apply_serial( + data, + spatialimage, + targets, + transform, + _ref.ndim, + ref_ndcoords, + n_resamplings, + resampled, + input_dtype, + order=order, + mode=mode, + cval=cval, + prefilter=prefilter, + max_concurrent=max_concurrent, ) - - for future in as_completed(results): - t, resampled_t = future.result() - resampled[..., t] = resampled_t + ) else: data = np.asanyarray(spatialimage.dataobj, dtype=input_dtype) diff --git a/nitransforms/tests/test_resampling.py b/nitransforms/tests/test_resampling.py index f944b225..2384ad97 100644 --- a/nitransforms/tests/test_resampling.py +++ b/nitransforms/tests/test_resampling.py @@ -15,7 +15,7 @@ from nitransforms import nonlinear as nitnl from nitransforms import manip as nitm from nitransforms import io -from nitransforms.resampling import apply, _apply_volume +from nitransforms.resampling import apply RMSE_TOL_LINEAR = 0.09 RMSE_TOL_NONLINEAR = 0.05 @@ -363,16 +363,3 @@ def test_LinearTransformsMapping_apply( reference=testdata_path / "sbref.nii.gz", serialize_nvols=2 if serialize_4d else np.inf, ) - - -@pytest.mark.parametrize("t", list(range(4))) -def test_apply_helper(monkeypatch, t): - """Ensure the apply helper function correctly just decorates with index.""" - from nitransforms.resampling import ndi - - def _retval(*args, **kwargs): - return 1 - - monkeypatch.setattr(ndi, "map_coordinates", _retval) - - assert _apply_volume(t, None, None) == (t, 1) From cd10b714761efc7cdea51cfc66c0ea99f0f89bf8 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sun, 18 Aug 2024 16:57:38 +0200 Subject: [PATCH 123/123] rel(24.0.0): Update CHANGES [skip ci] --- CHANGES.rst | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 
f118e261..b9e933aa 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,54 @@
+24.0.0 (August 18, 2024)
+========================
+A new series incorporating several major changes, including bugfixes and
+a number of housekeeping/maintenance actions.
+One relevant change is the outsourcing of the ``apply()`` method from the
+transformation data structures, contributed by @jmarabotto.
+``apply()`` is now a standalone function that operates on one transform
+and the images, surfaces, etc. provided as arguments.
+A later major development is the adoption of a foundation for surface transforms by @feilong
+and @Shotgunosine.
+
+New Contributors
+----------------
+
+* @mvdoc made their first contribution in https://github.com/nipy/nitransforms/pull/194
+* @jmarabotto made their first contribution in https://github.com/nipy/nitransforms/pull/197
+* @bpinsard made their first contribution in https://github.com/nipy/nitransforms/pull/182
+* @jbanusco made their first contribution in https://github.com/nipy/nitransforms/pull/188
+* @feilong made their first contribution in https://github.com/nipy/nitransforms/pull/203
+
+CHANGES
+-------
+
+* FIX: Inefficient iterative reloading of reference and moving images by @oesteban in https://github.com/nipy/nitransforms/pull/186
+* FIX: Postpone coordinate mapping on linear array transforms by @oesteban in https://github.com/nipy/nitransforms/pull/187
+* FIX: Remove unsafe cast during ``TransformBase.apply()`` by @effigies in https://github.com/nipy/nitransforms/pull/189
+* FIX: ``_is_oblique()`` by @mvdoc in https://github.com/nipy/nitransforms/pull/194
+* FIX: Update implementation of ``ndim`` property of transforms by @jmarabotto in https://github.com/nipy/nitransforms/pull/197
+* FIX: Output displacement fields by @bpinsard in https://github.com/nipy/nitransforms/pull/182
+* FIX: Composition of deformation fields by @jbanusco in https://github.com/nipy/nitransforms/pull/188
+* FIX: Indexing disallowed in lists introduced by bugfix by @oesteban in https://github.com/nipy/nitransforms/pull/204
+* FIX: Do not transpose (see :obj:`~scipy.ndimage.map_coordinates`) by @oesteban in https://github.com/nipy/nitransforms/pull/207
+* FIX: Forgotten test using ``xfm.apply()`` by @oesteban in https://github.com/nipy/nitransforms/pull/208
+* FIX: Load ITK fields from H5 correctly by @effigies in https://github.com/nipy/nitransforms/pull/211
+* FIX: Wrong warning argument name ``level`` in ``warnings.warn`` by @oesteban in https://github.com/nipy/nitransforms/pull/216
+* ENH: Define ``ndim`` property on nonlinear transforms by @oesteban in https://github.com/nipy/nitransforms/pull/201
+* ENH: Outsource ``apply()`` from transform objects by @jmarabotto in https://github.com/nipy/nitransforms/pull/195
+* ENH: Restore ``apply()`` method, warning of deprecation and calling function by @effigies in https://github.com/nipy/nitransforms/pull/209
+* ENH: ``SurfaceTransform`` class by @feilong in https://github.com/nipy/nitransforms/pull/203
+* ENH: Re-enable parallelization of ``apply()`` (builds on PR #215, solves Issue #214) by @jmarabotto in https://github.com/nipy/nitransforms/pull/217
+* ENH: Parallelize serialized 3D+t transforms by @oesteban in https://github.com/nipy/nitransforms/pull/220
+* ENH: Implement a memory limitation mechanism in loading data by @oesteban in https://github.com/nipy/nitransforms/pull/221
+* ENH: Serialize+parallelize 4D ``apply()`` into 3D+t and add 'low memory' loading by @oesteban in https://github.com/nipy/nitransforms/pull/215
+* MAINT: Loosen dependencies by @mgxd in https://github.com/nipy/nitransforms/pull/164
+* MAINT: Drop Python 3.7 support, test through 3.11 by @effigies in https://github.com/nipy/nitransforms/pull/181
+* MAINT: Update CircleCI's infrastructure (machine image and Python version in Docker image) by @oesteban in https://github.com/nipy/nitransforms/pull/206
+* MAINT: Fix tests for Python 3.12, numpy 2.0, and pytest-xdist by @effigies in https://github.com/nipy/nitransforms/pull/210
+* MAINT: Update ANTs' pinnings by @oesteban in https://github.com/nipy/nitransforms/pull/219
+
+**Full Changelog**: https://github.com/nipy/nitransforms/compare/23.0.1...24.0.0
+
 23.0.1 (July 10, 2023)
 ======================
 Hotfix release addressing two issues.
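
A minimal usage sketch of the serialized, concurrent ``apply()`` described in the
24.0.0 notes above follows. It is not taken from the patches: the file names are
placeholders, the identity affines stand in for real head-motion estimates, and
building ``LinearTransformsMapping`` from an N x 4 x 4 stack of matrices is an
assumption; only the keyword arguments (``serialize_nvols``, ``max_concurrent``,
``dtype_width``, ``output_dtype``) come from the diffs in this series::

    import numpy as np
    from nitransforms.linear import LinearTransformsMapping
    from nitransforms.resampling import apply

    # A series of per-volume affines (identities here, standing in for
    # head-motion estimates), assumed to be accepted as an N x 4 x 4 stack.
    hmc = LinearTransformsMapping(np.tile(np.eye(4), (10, 1, 1)))

    resampled = apply(
        hmc,
        "bold.nii.gz",             # placeholder 4D moving image
        reference="sbref.nii.gz",  # placeholder reference grid
        serialize_nvols=2,         # resample volume-by-volume (serialized 3D+t)
        max_concurrent=4,          # cap on concurrent 3D resamplings
        dtype_width=4,             # cap the input dtype at 4 bytes (float64 -> float32)
        output_dtype="float32",
    )

Setting ``serialize_nvols=2`` forces the volume-by-volume path for any 4D input,
mirroring the updated ``test_LinearTransformsMapping_apply``, while
``max_concurrent`` bounds how many of those 3D resamplings the asyncio semaphore
lets run at once.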