diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 826a62fe9f..6f762e919b 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,3 +1,7 @@ +# Fri Feb 21 14:06:53 2025 -0500 - markiewicz@stanford.edu - sty: black [ignore-rev] +8ed2b2306aeb7d89de4958b5293223ffe27a4f34 +# Tue Apr 13 10:16:17 2021 -0400 - markiewicz@stanford.edu - STY: black +b1690d5beb391e08c1e5463f1e3c641cf1e9f58e # Thu Oct 31 10:01:38 2024 -0400 - effigies@gmail.com - STY: black [ignore-rev] bd0d5856d183ba3918eda31f80db3b1d4387c55c # Thu Mar 21 13:34:09 2024 -0400 - effigies@gmail.com - STY: black [ignore-rev] diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml deleted file mode 100644 index 6b3ef96f0c..0000000000 --- a/.github/workflows/contrib.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: Contribution checks - -# This checks validate contributions meet baseline checks -# -# * specs - Ensure make - -on: - push: - branches: - - master - - maint/* - pull_request: - branches: - - master - - maint/* - -defaults: - run: - shell: bash - -concurrency: - group: contrib-${{ github.ref }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - stable: - # Check each OS, all supported Python, minimum versions and latest releases - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: ["ubuntu-latest"] - python-version: ["3.12"] - nipype-extras: ["dev"] - check: ["specs", "style"] - env: - DEPENDS: "" - CHECK_TYPE: ${{ matrix.check }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - EXTRA_PIP_FLAGS: "" - INSTALL_DEB_DEPENDENCIES: false - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Display Python version - run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive - run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-18.04' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v4 - with: - file: coverage.xml - token: ${{ secrets.CODECOV_TOKEN }} - if: ${{ always() }} - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} - path: test-results.xml - if: ${{ always() && matrix.check == 'test' }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 41776bc188..7934de87a7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,22 +1,11 @@ -name: Stable tests - -# This file tests the claimed support range of nipype including -# -# * Operating systems: Linux, OSX -# * Dependencies: minimum requirements, optional requirements -# * Installation methods: setup.py, sdist, wheel, archive +name: Tox on: push: - branches: - - master - - maint/* - tags: - - "*" + branches: [ master, main, 'maint/*' ] + tags: [ '*' ] pull_request: - branches: - - master - - maint/* + branches: [ master, main, 'maint/*' ] schedule: # 8am EST / 9am EDT Mondays - cron: "0 13 * * 1" @@ -26,27 +15,28 
@@ defaults: shell: bash concurrency: - group: tests-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -permissions: {} +permissions: + contents: read + +env: + # Force tox and pytest to use color + FORCE_COLOR: true + + jobs: build: - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - python-version: 3 - - run: pip install --upgrade build twine - - name: Build sdist and wheel - run: python -m build - - run: twine check dist/* + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - run: uv build + - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 with: name: dist @@ -82,72 +72,52 @@ jobs: - name: Run tests run: pytest --doctest-modules -v --pyargs nipype - stable: + test: # Check each OS, all supported Python, minimum versions and latest releases - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ${{ matrix.os }} strategy: matrix: - os: ["ubuntu-22.04"] - python-version: ["3.9", "3.10", "3.11", "3.12"] - check: ["test"] - pip-flags: [""] - depends: ["REQUIREMENTS"] - deb-depends: [false] - nipype-extras: ["doc,tests,profiler"] + os: ["ubuntu-latest"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + dependencies: [none, full, pre] include: - - os: ubuntu-22.04 + - os: ubuntu-latest python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,profiler,duecredit,ssh - - os: ubuntu-20.04 - python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,nipy,profiler,duecredit,ssh + dependencies: min + exclude: + # Skip some intermediate versions for full tests + - python-version: "3.10" + dependencies: full + - python-version: "3.11" + dependencies: full + # Do not test pre-releases for versions out of SPEC0 + - python-version: "3.9" + dependencies: pre + - python-version: "3.10" + dependencies: pre + env: - DEPENDS: ${{ matrix.depends }} - CHECK_TYPE: ${{ matrix.check }} - EXTRA_PIP_FLAGS: ${{ matrix.pip-flags }} - INSTALL_DEB_DEPENDENCIES: ${{ matrix.deb-depends }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 + DEPENDS: ${{ matrix.dependencies }} steps: - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive + - name: Install tox run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-latest' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v4 + uv tool install tox --with=tox-uv --with=tox-gh-actions + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 + - uses: codecov/codecov-action@v5 with: - file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} 
- name: Upload pytest test results @@ -160,7 +130,7 @@ jobs: publish: runs-on: ubuntu-latest environment: "Package deployment" - needs: [stable, test-package] + needs: [test, test-package] if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') steps: - uses: actions/download-artifact@v4 @@ -171,3 +141,21 @@ jobs: with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} + + checks: + runs-on: 'ubuntu-latest' + continue-on-error: true + strategy: + matrix: + check: ['specs', 'style'] + + steps: + - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Show tox config + run: uvx tox c + - name: Show tox config (this call) + run: uvx tox c -e ${{ matrix.check }} + - name: Run check + run: uvx tox -e ${{ matrix.check }} diff --git a/.mailmap b/.mailmap index 4c5bd5de4d..35ccbf89d2 100644 --- a/.mailmap +++ b/.mailmap @@ -107,6 +107,7 @@ Joerg Stadler Joerg Stadler John A. Lee John A. Lee +Jon Cluce Joke Durnez Jordi Huguet Josh Warner diff --git a/.wci.yml b/.wci.yml new file mode 100644 index 0000000000..2adbae9fcc --- /dev/null +++ b/.wci.yml @@ -0,0 +1,30 @@ +# Project available at https://github.com/nipy/nipype + +name: nipype + +headline: "Neuroimaging in Python: Pipelines and Interfaces" + +description: | + Nipype, an open-source, community-developed initiative under the umbrella of NiPy, is a Python project that + provides a uniform interface to existing neuroimaging software and facilitates interaction between these + packages within a single workflow. Nipype provides an environment that encourages interactive exploration of + algorithms from different packages (e.g., SPM, FSL, FreeSurfer, AFNI, Slicer, ANTS), eases the design of + workflows within and between packages, and reduces the learning curve necessary to use different packages. + +language: Python3 + +documentation: + general: https://nipype.readthedocs.io/en/latest/ + installation: https://nipype.readthedocs.io/en/latest/users/install.html + tutorial: https://miykael.github.io/nipype_tutorial/ + +execution_environment: + resource_managers: + - SLURM + - Condor + - DAGMan + - LSF + - OAR + - PBS + - SGE + - Soma-workflow diff --git a/.zenodo.json b/.zenodo.json index 5ddb9a71dd..3e2c2be6f6 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -285,6 +285,11 @@ { "name": "Mordom, David" }, + { + "affiliation": "Child Mind Institute", + "name": "Cluce, Jon", + "orcid": "0000-0001-7590-5806" + }, { "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.", "name": "Guillon, Jérémy", diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index b150eece35..e31e508edf 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,8 +1,52 @@ +1.10.0 (March 19, 2025) +======================= + +New feature release in the 1.10.x series. + +This release adds GPU support to multiprocess resource management. +In general, no changes to existing code should be required, as long as a +GPU-enabled interface exposes a ``use_gpu`` input. +The ``n_gpu_procs`` plugin argument can be used to set the number of GPU +processes that may run in parallel, overriding the default (the number of +GPUs identified by ``nvidia-smi``, or 1 if no GPUs are detected).
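+
+For example (a minimal sketch, assuming a hypothetical ``Workflow`` instance
+``wf`` whose GPU-enabled nodes expose a ``use_gpu`` input)::
+
+    # Allow up to 8 CPU processes, but at most 1 concurrent GPU process
+    wf.run(plugin="MultiProc", plugin_args={"n_procs": 8, "n_gpu_procs": 1})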
+ + * FIX: Reimplement ``gpu_count()`` (https://github.com/nipy/nipype/pull/3718) + * FIX: Avoid 0D array in ``algorithms.misc.merge_rois`` (https://github.com/nipy/nipype/pull/3713) + * FIX: Allow nipype.sphinx.ext.apidoc Config to work with Sphinx 8.2.1+ (https://github.com/nipy/nipype/pull/3716) + * FIX: Resolve crashes when running workflows with updatehash=True (https://github.com/nipy/nipype/pull/3709) + * ENH: Support for gpu queue (https://github.com/nipy/nipype/pull/3642) + * ENH: Update to .wci.yml (https://github.com/nipy/nipype/pull/3708) + * ENH: Add Workflow Community Initiative (WCI) descriptor (https://github.com/nipy/nipype/pull/3608) + + +1.9.2 (December 17, 2024) +========================= + +Bug fix release in the 1.9.x series. + + * FIX: Missed np.savetxt bstring (https://github.com/nipy/nipype/pull/3704) + * MAINT: Bump astral-sh/setup-uv from 3 to 4 (https://github.com/nipy/nipype/pull/3702) + * MAINT: Bump codecov/codecov-action from 4 to 5 (https://github.com/nipy/nipype/pull/3703) + + +1.9.1 (November 19, 2024) +========================= + +Bug fix release in the 1.9.x series. + +This release adds support for Numpy 2 and Python 3.13. + + * FIX: Restore generate_gantt_chart functionality (https://github.com/nipy/nipype/pull/3290) + * FIX: Address numpy and traits deprecations (https://github.com/nipy/nipype/pull/3699) + * FIX: `ts_Z_corr` → `ts_wb_Z` (https://github.com/nipy/nipype/pull/3697) + * ENH: Remove unused and recently unsupported antsRegistration flag (https://github.com/nipy/nipype/pull/3695) + * MAINT: Bump codecov/codecov-action from 4 to 5 (https://github.com/nipy/nipype/pull/3698) + 1.9.0 (October 31, 2024) ======================== -New feature release in the 1.9.0 series. +New feature release in the 1.9.x series. 
* FIX: Remove exists=True from fsl.MotionOutliers output that might not exist (https://github.com/nipy/nipype/pull/1428) * FIX: Improve evaluate_connect_function errors across Python versions (https://github.com/nipy/nipype/pull/3655) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index e7b9dca2f4..1079d15607 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.8.6 `_ `1.8.5 `_ +Previous versions: `1.10.0 `_ `1.9.2 `_ Workflows --------- diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 157d1e48d7..d2e6168ea7 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -188,7 +188,7 @@ def _run_interface(self, runtime): if self.inputs.save_std: out_file = self._gen_fname("dvars_std", ext="tsv") - np.savetxt(out_file, dvars[0], fmt=b"%0.6f") + np.savetxt(out_file, dvars[0], fmt="%0.6f") self._results["out_std"] = out_file if self.inputs.save_plot: @@ -208,7 +208,7 @@ def _run_interface(self, runtime): if self.inputs.save_nstd: out_file = self._gen_fname("dvars_nstd", ext="tsv") - np.savetxt(out_file, dvars[1], fmt=b"%0.6f") + np.savetxt(out_file, dvars[1], fmt="%0.6f") self._results["out_nstd"] = out_file if self.inputs.save_plot: @@ -228,7 +228,7 @@ def _run_interface(self, runtime): if self.inputs.save_vxstd: out_file = self._gen_fname("dvars_vxstd", ext="tsv") - np.savetxt(out_file, dvars[2], fmt=b"%0.6f") + np.savetxt(out_file, dvars[2], fmt="%0.6f") self._results["out_vxstd"] = out_file if self.inputs.save_plot: @@ -251,8 +251,8 @@ def _run_interface(self, runtime): np.savetxt( out_file, np.vstack(dvars).T, - fmt=b"%0.8f", - delimiter=b"\t", + fmt="%0.8f", + delimiter="\t", header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", comments="", ) @@ -689,7 +689,7 @@ def _run_interface(self, runtime): np.savetxt( components_file, components, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(components_header), comments="", @@ -729,7 +729,7 @@ def _run_interface(self, runtime): np.savetxt( self._results["pre_filter_file"], filter_basis, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(header), comments="", diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index e1a67f0b08..fe27b877a2 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1490,14 +1490,13 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None): for cname, iname in zip(in_files, in_idxs): f = np.load(iname) - idxs = np.squeeze(f["arr_0"]) + idxs = np.atleast_1d(np.squeeze(f["arr_0"])) + nels = len(idxs) for d, fname in enumerate(nii): data = np.asanyarray(nb.load(fname).dataobj).reshape(-1) cdata = nb.load(cname).dataobj[..., d].reshape(-1) - nels = len(idxs) - idata = (idxs,) - data[idata] = cdata[0:nels] + data[idxs] = cdata[:nels] nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname) imgs = [nb.load(im) for im in nii] diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index ff867ae26c..65aae2ef1c 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -189,7 +189,8 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): desc="Source of movement parameters", mandatory=True, ) - use_differences = traits.ListBool( + use_differences = traits.List( + traits.Bool, [True, False], minlen=2, maxlen=2, @@ -600,10 +601,10 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): outliers = 
np.unique(np.union1d(iidx, np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") - np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") + np.savetxt(artifactfile, outliers, fmt="%d", delimiter=" ") + np.savetxt(intensityfile, g, fmt="%.2f", delimiter=" ") if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") + np.savetxt(normfile, normval, fmt="%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 51010aea3a..4d5a7ca53b 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -48,8 +48,6 @@ def test_ArtifactDetect_inputs(): xor=["norm_threshold"], ), use_differences=dict( - maxlen=2, - minlen=2, usedefault=True, ), use_norm=dict( diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index 5bd9fad528..cd5b8f8075 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -1,5 +1,4 @@ -""" Test the nipype interface caching mechanism -""" +"""Test the nipype interface caching mechanism""" from .. import Memory from ...pipeline.engine.tests.test_engine import EngineTestInterface diff --git a/nipype/conftest.py b/nipype/conftest.py index 18b8a1ca6d..151906678f 100644 --- a/nipype/conftest.py +++ b/nipype/conftest.py @@ -2,7 +2,7 @@ import shutil from tempfile import mkdtemp import pytest -import numpy +import numpy as np import py.path as pp NIPYPE_DATADIR = os.path.realpath( @@ -15,12 +15,17 @@ @pytest.fixture(autouse=True) def add_np(doctest_namespace): - doctest_namespace["np"] = numpy + doctest_namespace["np"] = np doctest_namespace["os"] = os doctest_namespace["pytest"] = pytest doctest_namespace["datadir"] = data_dir +@pytest.fixture(scope='session', autouse=True) +def legacy_printoptions(): + np.set_printoptions(legacy='1.21') + + @pytest.fixture(autouse=True) def _docdir(request): """Grabbed from https://stackoverflow.com/a/46991331""" diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index 289c8dfa2f..680ba30e2e 100644 --- a/nipype/external/cloghandler.py +++ b/nipype/external/cloghandler.py @@ -9,7 +9,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -""" cloghandler.py: A smart replacement for the standard RotatingFileHandler +"""cloghandler.py: A smart replacement for the standard RotatingFileHandler ConcurrentRotatingFileHandler: This class is a log handler which is a drop-in replacement for the python standard log handler 'RotateFileHandler', the primary diff --git a/nipype/info.py b/nipype/info.py index 3b006ae161..7ad5aba5bb 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -1,11 +1,11 @@ -""" This file contains defines parameters for nipy that we use to fill +"""This file defines parameters for nipy that we use to fill settings in setup.py, the nipy top-level docstring, and for building the docs.
In setup.py in particular, we exec this file, so it cannot import nipy """ # nipype version information # Remove .dev0 for release -__version__ = "1.9.0" +__version__ = "1.11.0.dev0" def get_nipype_gitversion(): @@ -58,6 +58,7 @@ def get_nipype_gitversion(): "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Scientific/Engineering", ] PYTHON_REQUIRES = ">= 3.9" @@ -99,12 +100,11 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = "2.1.0" -NETWORKX_MIN_VERSION = "2.0" -NUMPY_MIN_VERSION = "1.17" -NUMPY_MAX_VERSION = "2.0" -SCIPY_MIN_VERSION = "0.14" -TRAITS_MIN_VERSION = "4.6" +NIBABEL_MIN_VERSION = "3.0" +NETWORKX_MIN_VERSION = "2.5" +NUMPY_MIN_VERSION = "1.21" +SCIPY_MIN_VERSION = "1.8" +TRAITS_MIN_VERSION = "6.2" DATEUTIL_MIN_VERSION = "2.2" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_MIN_VERSION = "1.5.2" @@ -136,7 +136,7 @@ def get_nipype_gitversion(): "click>=%s" % CLICK_MIN_VERSION, "networkx>=%s" % NETWORKX_MIN_VERSION, "nibabel>=%s" % NIBABEL_MIN_VERSION, - "numpy>=%s,<%s" % (NUMPY_MIN_VERSION, NUMPY_MAX_VERSION), + "numpy>=%s" % NUMPY_MIN_VERSION, "packaging", "prov>=%s" % PROV_MIN_VERSION, "pydot>=%s" % PYDOT_MIN_VERSION, @@ -144,22 +144,24 @@ def get_nipype_gitversion(): "rdflib>=%s" % RDFLIB_MIN_VERSION, "scipy>=%s" % SCIPY_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, - "traits>=%s,!=5.0" % TRAITS_MIN_VERSION, + "traits>=%s" % TRAITS_MIN_VERSION, "filelock>=3.0.0", - "etelemetry>=0.2.0", + "acres", + "etelemetry>=0.3.1", "looseversion!=1.2", "puremagic", ] TESTS_REQUIRES = [ - "codecov", - "coverage", - "pytest", - "pytest-cov", + "coverage >= 5.2.1", + "pandas >= 1.5.0", + "pytest >= 6", + "pytest-cov >=2.11", "pytest-env", - "pytest-timeout", + "pytest-timeout >=1.4", "pytest-doctestplus", - "sphinx", + "pytest-xdist >= 2.5", + "sphinx >=7", ] EXTRA_REQUIRES = { diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index d3daebcf4c..b5e27ea53a 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -2741,7 +2741,7 @@ def _list_outputs(self): odir = os.path.dirname(os.path.abspath(prefix)) outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] - if isdefined(self.inputs.ts_wb_corr) or isdefined(self.inputs.ts_Z_corr): + if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z: corrdir = os.path.join(odir, prefix + "_000_INDIV") outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 41037ffc5f..55e9738170 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1,5 +1,5 @@ """The ants module provides basic functions for interfacing with ants - functions. +functions. 
""" import os @@ -710,9 +710,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> reg.run() # doctest: +SKIP @@ -726,9 +726,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' >>> reg1.run() # doctest: +SKIP @@ -742,9 +742,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' Clip extremely low intensity data points using winsorize_lower_quantile. 
All data points @@ -759,9 +759,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' Use float instead of double for computations (saves memory usage) @@ -773,10 +773,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' Force to use double instead of float for computations (more precision and memory usage). 
@@ -788,10 +788,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' 'collapse_output_transforms' can be used to put all transformation in a single 'composite_transform'- @@ -823,10 +823,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' @@ -857,10 +857,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 0' One can use multiple similarity metrics in a single registration stage.The Node below first @@ -885,10 +885,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, 
moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed1.nii, moving1.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' ANTS Registration can also use multiple modalities to perform the registration. Here it is assumed @@ -906,10 +906,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed2.nii, moving2.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Different methods can be used for the interpolation when applying transformations. @@ -923,9 +923,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation BSpline[ 3 ] --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> # Test Interpolation Parameters (MultiLabel/Gaussian) @@ -937,10 +937,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Gaussian[ 1.0, 1.0 ] \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' BSplineSyN non-linear registration with custom parameters. 
@@ -954,9 +954,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ +--use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Mask the fixed image in the second stage of the registration (but not the first). @@ -969,10 +969,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ NULL, NULL ] \ +--use-histogram-matching 1 --masks [ NULL, NULL ] \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ +--use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Here we use both a warpfield and a linear transformation, before registration commences. 
Note that @@ -988,10 +988,10 @@ class Registration(ANTSCommand): [ func_to_struct.mat, 0 ] [ ants_Warp.nii.gz, 0 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ @@ -1155,10 +1155,9 @@ def _format_registration(self): % self._format_xarray(self.inputs.shrink_factors[ii]) ) if isdefined(self.inputs.use_estimate_learning_rate_once): - retval.append( - "--use-estimate-learning-rate-once %d" - % self.inputs.use_estimate_learning_rate_once[ii] - ) + # this flag was removed because it was never used in the ants codebase + # removed from Ants in commit e1e47994b on 2022-08-09 + pass if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 95f29d5982..883eff1de3 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -1,5 +1,4 @@ -"""ANTS Apply Transforms interface -""" +"""ANTS Apply Transforms interface""" import os diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 3c87b71975..47592d70b5 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1328,7 +1328,8 @@ class JointFusionInputSpec(ANTSCommandInputSpec): usedefault=True, desc=("Constrain solution to non-negative weights."), ) - patch_radius = traits.ListInt( + patch_radius = traits.List( + traits.Int, minlen=3, maxlen=3, argstr="-p %s", diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py index f234ceea7c..98d8d696a1 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -70,8 +70,6 @@ def test_JointFusion_inputs(): ), patch_radius=dict( argstr="-p %s", - maxlen=3, - minlen=3, ), retain_atlas_voting_images=dict( argstr="-f", diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index c73b64c632..cdfa3529a7 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -1,5 +1,4 @@ -"""The ants visualisation module provides basic functions based on ITK. -""" +"""The ants visualisation module provides basic functions based on ITK.""" import os diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py index 2e54847958..2af425d284 100644 --- a/nipype/interfaces/base/__init__.py +++ b/nipype/interfaces/base/__init__.py @@ -7,7 +7,8 @@ This module defines the API of all nipype interfaces. 
""" -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from traits.trait_errors import TraitError from .core import ( diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index a7f61e6889..defbca7f43 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -15,7 +15,8 @@ from packaging.version import Version from traits.trait_errors import TraitError -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from ...utils.filemanip import md5, hash_infile, hash_timestamp from .traits_extension import ( traits, diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index 52770e476c..406e6e9358 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -3,7 +3,7 @@ import os import pytest -from pkg_resources import resource_filename as pkgrf +import acres from ....utils.filemanip import md5 from ... import base as nib @@ -42,14 +42,13 @@ def test_bunch_methods(): def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. - json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) + json_pth = acres.Loader('nipype.testing').cached('data', 'realign_json.json') - b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) + b = nib.Bunch(infile=str(json_pth), otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. 
jshash = md5() - with open(json_pth) as fp: - jshash.update(fp.read().encode("utf-8")) + jshash.update(json_pth.read_bytes()) assert newbdict["infile"][0][1] == jshash.hexdigest() assert newbdict["yat"] is True diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 746af18f1a..b07f6a58d3 100644 --- a/nipype/interfaces/bru2nii.py +++ b/nipype/interfaces/bru2nii.py @@ -1,5 +1,4 @@ -"""The bru2nii module provides basic functions for dicom conversion -""" +"""The bru2nii module provides basic functions for dicom conversion""" import os from .base import ( diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index 766fa9c906..67e973df66 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Camino top level namespace -""" +"""Camino top level namespace""" from .connectivity import Conmat from .convert import ( diff --git a/nipype/interfaces/cmtk/base.py b/nipype/interfaces/cmtk/base.py index d0c226dc49..c4c997288b 100644 --- a/nipype/interfaces/cmtk/base.py +++ b/nipype/interfaces/cmtk/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for cmtk """ +"""Base interface for cmtk""" from ..base import LibraryBaseInterface from ...utils.misc import package_check diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index fa031799e3..bf6336c96d 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os import re diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 00f86a322c..daadffc200 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os import re diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 5190843875..d05cfadff6 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os from ..base import ( diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index ec19d1fe7b..44290cd1d7 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,7 +1,8 @@ -""" Base interfaces for dipy """ +"""Base interfaces for dipy""" import os.path as op import inspect +from functools import partial import numpy as np from ..base import ( traits, @@ 
-109,15 +110,15 @@ def convert_to_traits_type(dipy_type, is_file=False): dipy_type = dipy_type.lower() is_mandatory = bool("optional" not in dipy_type) if "variable" in dipy_type and "str" in dipy_type: - return traits.ListStr, is_mandatory + return partial(traits.List, traits.Str), is_mandatory elif "variable" in dipy_type and "int" in dipy_type: - return traits.ListInt, is_mandatory + return partial(traits.List, traits.Int), is_mandatory elif "variable" in dipy_type and "float" in dipy_type: - return traits.ListFloat, is_mandatory + return partial(traits.List, traits.Float), is_mandatory elif "variable" in dipy_type and "bool" in dipy_type: - return traits.ListBool, is_mandatory + return partial(traits.List, traits.Bool), is_mandatory elif "variable" in dipy_type and "complex" in dipy_type: - return traits.ListComplex, is_mandatory + return partial(traits.List, traits.Complex), is_mandatory elif "str" in dipy_type and not is_file: return traits.Str, is_mandatory elif "str" in dipy_type and is_file: diff --git a/nipype/interfaces/dipy/tests/test_base.py b/nipype/interfaces/dipy/tests/test_base.py index d2d81ec005..015215054d 100644 --- a/nipype/interfaces/dipy/tests/test_base.py +++ b/nipype/interfaces/dipy/tests/test_base.py @@ -16,7 +16,7 @@ def test_convert_to_traits_type(): Params = namedtuple("Params", "traits_type is_file") - Res = namedtuple("Res", "traits_type is_mandatory") + Res = namedtuple("Res", "traits_type subtype is_mandatory") l_entries = [ Params("variable string", False), Params("variable int", False), @@ -42,35 +42,38 @@ def test_convert_to_traits_type(): Params("complex, optional", False), ] l_expected = [ - Res(traits.ListStr, True), - Res(traits.ListInt, True), - Res(traits.ListFloat, True), - Res(traits.ListBool, True), - Res(traits.ListComplex, True), - Res(traits.ListInt, False), - Res(traits.ListStr, False), - Res(traits.ListFloat, False), - Res(traits.ListBool, False), - Res(traits.ListComplex, False), - Res(traits.Str, True), - Res(traits.Int, True), - Res(File, True), - Res(traits.Float, True), - Res(traits.Bool, True), - Res(traits.Complex, True), - Res(traits.Str, False), - Res(traits.Int, False), - Res(File, False), - Res(traits.Float, False), - Res(traits.Bool, False), - Res(traits.Complex, False), + Res(traits.List, traits.Str, True), + Res(traits.List, traits.Int, True), + Res(traits.List, traits.Float, True), + Res(traits.List, traits.Bool, True), + Res(traits.List, traits.Complex, True), + Res(traits.List, traits.Int, False), + Res(traits.List, traits.Str, False), + Res(traits.List, traits.Float, False), + Res(traits.List, traits.Bool, False), + Res(traits.List, traits.Complex, False), + Res(traits.Str, None, True), + Res(traits.Int, None, True), + Res(File, None, True), + Res(traits.Float, None, True), + Res(traits.Bool, None, True), + Res(traits.Complex, None, True), + Res(traits.Str, None, False), + Res(traits.Int, None, False), + Res(File, None, False), + Res(traits.Float, None, False), + Res(traits.Bool, None, False), + Res(traits.Complex, None, False), ] for entry, res in zip(l_entries, l_expected): traits_type, is_mandatory = convert_to_traits_type( entry.traits_type, entry.is_file ) - assert traits_type == res.traits_type + trait_instance = traits_type() + assert isinstance(trait_instance, res.traits_type) + if res.subtype: + assert isinstance(trait_instance.inner_traits()[0].trait_type, res.subtype) assert is_mandatory == res.is_mandatory with pytest.raises(IOError): diff --git a/nipype/interfaces/freesurfer/longitudinal.py 
b/nipype/interfaces/freesurfer/longitudinal.py index 227ea76775..41e95c091b 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various longitudinal commands provided by freesurfer -""" +"""Provides interfaces to various longitudinal commands provided by freesurfer""" import os diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 6376c1b971..5e245a9a85 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -1,7 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with - freesurfer tools. +freesurfer tools. """ import os diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py index 4505985127..28aa763b06 100644 --- a/nipype/interfaces/freesurfer/petsurfer.py +++ b/nipype/interfaces/freesurfer/petsurfer.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer -""" +"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer""" import os diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 5b2fd19a0b..89c218f969 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by FreeSurfer -""" +"""Provides interfaces to various commands provided by FreeSurfer""" import os import os.path as op from glob import glob diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index bc70fc44a6..790066d0ec 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various longitudinal commands provided by freesurfer -""" +"""Provides interfaces to various longitudinal commands provided by freesurfer""" import os import os.path diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 777f42f019..2c1cdbcc94 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Interfaces to assorted Freesurfer utility programs. 
-""" +"""Interfaces to assorted Freesurfer utility programs.""" import os import re import shutil diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index 09daacb17f..7dda9a49d7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -417,7 +417,7 @@ def _generate_encfile(self): float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") + np.savetxt(out_file, np.array(lines), fmt="%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 2a148025f5..2ada4ab969 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -9,6 +9,7 @@ from shutil import rmtree from string import Template +import acres import numpy as np from looseversion import LooseVersion from nibabel import load @@ -2547,12 +2548,5 @@ def load_template(name): template : string.Template """ - from pkg_resources import resource_filename as pkgrf - - full_fname = pkgrf( - "nipype", os.path.join("interfaces", "fsl", "model_templates", name) - ) - with open(full_fname) as template_file: - template = Template(template_file.read()) - - return template + loader = acres.Loader('nipype.interfaces.fsl') + return Template(loader.readable('model_templates', name).read_text()) diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 46cdfb44f2..d6af1ba073 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -1,14 +1,14 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Set of interfaces that allow interaction with data. Currently - available interfaces are: +"""Set of interfaces that allow interaction with data. 
Currently +available interfaces are: - DataSource: Generic nifti to named Nifti interface - DataSink: Generic named output from interfaces to data store - XNATSource: preliminary interface to XNAT +DataSource: Generic nifti to named Nifti interface +DataSink: Generic named output from interfaces to data store +XNATSource: preliminary interface to XNAT - To come : - XNATSink +To come : +XNATSink """ import glob import fnmatch diff --git a/nipype/interfaces/mixins/reporting.py b/nipype/interfaces/mixins/reporting.py index 90ca804618..a836cfa3fa 100644 --- a/nipype/interfaces/mixins/reporting.py +++ b/nipype/interfaces/mixins/reporting.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" class mixin and utilities for enabling reports for nipype interfaces """ +"""class mixin and utilities for enabling reports for nipype interfaces""" import os from abc import abstractmethod diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index 57cdad0168..0165087376 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -99,8 +99,9 @@ class MRDeGibbsInputSpec(MRTrix3BaseInputSpec): mandatory=True, desc="input DWI image", ) - axes = traits.ListInt( - default_value=[0, 1], + axes = traits.List( + traits.Int, + [0, 1], usedefault=True, sep=",", minlen=2, diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py index cd15f36ac6..83f5bfef4b 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py @@ -9,8 +9,6 @@ def test_MRDeGibbs_inputs(): ), axes=dict( argstr="-axes %s", - maxlen=2, - minlen=2, sep=",", usedefault=True, ), diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 9d78517f79..df6413320e 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -105,7 +105,7 @@ def _run_interface(self, runtime): # save output self._results["out_file"] = os.path.join(runtime.cwd, self.inputs.out_file) - np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") + np.savetxt(self._results["out_file"], output, fmt="%s", delimiter="\t") return runtime def _process_inputs(self): diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py index 25aef8b873..1f8f1e4657 100644 --- a/nipype/interfaces/nipy/base.py +++ b/nipype/interfaces/nipy/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for nipy """ +"""Base interface for nipy""" from ..base import LibraryBaseInterface from ...utils.misc import package_check diff --git a/nipype/interfaces/nitime/base.py b/nipype/interfaces/nitime/base.py index 7e434f1d3e..4109bc3a74 100644 --- a/nipype/interfaces/nitime/base.py +++ b/nipype/interfaces/nitime/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for nitime """ +"""Base interface for nitime""" from ..base import LibraryBaseInterface diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index 64bb8366a0..8351a3c38a 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -51,7 +51,9 @@ def 
test_coherence_analysis(tmpdir): # This is the nitime analysis: TR = 1.89 - data_rec = np.recfromcsv(example_data("fmri_timeseries.csv")) + data_rec = np.genfromtxt( + example_data("fmri_timeseries.csv"), delimiter=',', names=True + ) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 8a3a479705..8d931a72ba 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""SPM wrappers for preprocessing data -""" +"""SPM wrappers for preprocessing data""" import os from copy import deepcopy @@ -273,7 +272,8 @@ class ApplyVDMInputSpec(SPMCommandInputSpec): desc="phase encode direction input data have been acquired with", usedefault=True, ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -524,7 +524,8 @@ class RealignInputSpec(SPMCommandInputSpec): field="eoptions.wrap", desc="Check if interpolation should wrap in [x,y,z]", ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -731,7 +732,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): "maximization and smoothness maximization of the estimated field." ), ) - est_reg_factor = traits.ListInt( + est_reg_factor = traits.List( + traits.Int, [100000], field="uweoptions.lambda", minlen=1, @@ -769,7 +771,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): field="uweoptions.rem", desc="Re-estimate movement parameters at each unwarping iteration.", ) - est_num_of_iterations = traits.ListInt( + est_num_of_iterations = traits.List( + traits.Int, [5], field="uweoptions.noi", minlen=1, @@ -783,7 +786,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): usedefault=True, desc="Point in position space to perform Taylor-expansion around.", ) - reslice_which = traits.ListInt( + reslice_which = traits.List( + traits.Int, [2, 1], field="uwroptions.uwwhich", minlen=2, diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py index 2f56b49ef2..6d3b3c360d 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py @@ -41,8 +41,6 @@ def test_ApplyVDM_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_Realign.py b/nipype/interfaces/spm/tests/test_auto_Realign.py index 5165d6f33e..8262243a61 100644 --- a/nipype/interfaces/spm/tests/test_auto_Realign.py +++ b/nipype/interfaces/spm/tests/test_auto_Realign.py @@ -56,8 +56,6 @@ def test_Realign_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py index bb27419547..dc996c130e 100644 --- a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py +++ b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py @@ -15,8 +15,6 @@ def test_RealignUnwarp_inputs(): ), est_num_of_iterations=dict( field="uweoptions.noi", - maxlen=1, - minlen=1, usedefault=True, ), est_re_est_mov_par=dict( @@ -24,8 +22,6 @@ def test_RealignUnwarp_inputs(): ), est_reg_factor=dict( 
field="uweoptions.lambda", - maxlen=1, - minlen=1, usedefault=True, ), est_reg_order=dict( @@ -80,8 +76,6 @@ def test_RealignUnwarp_inputs(): ), reslice_which=dict( field="uwroptions.uwwhich", - maxlen=2, - minlen=2, usedefault=True, ), reslice_wrap=dict( diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py index 564966cb5b..ecc1bf7935 100644 --- a/nipype/interfaces/utility/base.py +++ b/nipype/interfaces/utility/base.py @@ -1,9 +1,9 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ - # changing to temporary directories - >>> tmp = getfixture('tmpdir') - >>> old = tmp.chdir() +# changing to temporary directories +>>> tmp = getfixture('tmpdir') +>>> old = tmp.chdir() """ import os import re diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py index 979e328bb6..7470eecbfe 100644 --- a/nipype/interfaces/utility/csv.py +++ b/nipype/interfaces/utility/csv.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""CSV Handling utilities -""" +"""CSV Handling utilities""" import csv from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface from ..io import add_traits diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 31ee29e04d..e29b56718b 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -452,7 +452,7 @@ def run(self, updatehash=False): cached, updated = self.is_cached() # If the node is cached, check on pklz files and finish - if not force_run and (updated or (not updated and updatehash)): + if cached and not force_run and (updated or updatehash): logger.debug("Only updating node hashes or skipping execution") inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): @@ -820,6 +820,11 @@ def update(self, **opts): """Update inputs""" self.inputs.update(**opts) + def is_gpu_node(self): + return bool(getattr(self.inputs, 'use_cuda', False)) or bool( + getattr(self.inputs, 'use_gpu', False) + ) + class JoinNode(Node): """Wraps interface objects that join inputs into a list. diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index abf9426d43..7650be1cd3 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" from copy import deepcopy from glob import glob import os @@ -541,7 +540,9 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple): pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: - graph_str = f.read() + # Replace handles change in networkx behavior when graph is missing a name + # Probably around 3, but I haven't tracked it down. + graph_str = f.read().replace(' {', ' {') if simple: for line in dotfiles[graph_type]: @@ -635,7 +636,9 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: - graph_str = f.read() + # Replace handles change in networkx behavior when graph is missing a name + # Probably around 3, but I haven't tracked it down. 
+ graph_str = f.read().replace(' {', ' {') if simple: for line in dotfiles_iter[graph_type]: diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index 2fe5f70564..c177ad24d3 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for join expansion -""" +"""Tests for join expansion""" import pytest from .... import config diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 78483b6923..7ae8ce5b33 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine utils module -""" +"""Tests for the engine utils module""" import os from copy import deepcopy import pytest diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py index 12d56de285..980b54fa28 100644 --- a/nipype/pipeline/engine/tests/test_workflows.py +++ b/nipype/pipeline/engine/tests/test_workflows.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine workflows module -""" +"""Tests for the engine workflows module""" from glob import glob import os from shutil import rmtree diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index 0fff477377..789eaecfab 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via Condor -""" +"""Parallel workflow execution via Condor""" import os from time import sleep diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 55f3f03bee..1c424c24ef 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via Condor DAGMan -""" +"""Parallel workflow execution via Condor DAGMan""" import os import sys diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 1dac35cf8f..4798e083bd 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Debug plugin -""" +"""Debug plugin""" import networkx as nx from .base import PluginBase, logger diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index f52b3e6282..2c80eb4655 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Parallel workflow execution via IPython controller -""" +"""Parallel workflow execution via IPython controller""" from pickle import dumps import sys diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 93029ee1b9..aa29a5951b 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Local serial workflow execution 
-""" +"""Local serial workflow execution""" import os from .base import PluginBase, logger, report_crash, report_nodes_not_run, str2bool diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index cf334be051..4ca380dfaa 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via LSF -""" +"""Parallel workflow execution via LSF""" import os import re diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 401b01b388..be0e006229 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -21,6 +21,7 @@ from ...utils.profiler import get_system_total_memory_gb from ..engine import MapNode from .base import DistributedPluginBase +from ...utils.gpu_count import gpu_count try: from textwrap import indent @@ -100,6 +101,7 @@ class MultiProcPlugin(DistributedPluginBase): - non_daemon: boolean flag to execute as non-daemon processes - n_procs: maximum number of threads to be executed in parallel + - n_gpu_procs: maximum number of GPU threads to be executed in parallel - memory_gb: maximum memory (in GB) that can be used at once. - raise_insufficient: raise error if the requested resources for a node over the maximum `n_procs` and/or `memory_gb` @@ -130,10 +132,24 @@ def __init__(self, plugin_args=None): ) self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) + # GPU found on system + self.n_gpus_visible = gpu_count() + # proc per GPU set by user + self.n_gpu_procs = self.plugin_args.get('n_gpu_procs', self.n_gpus_visible) + + # total no. of processes allowed on all gpus + if self.n_gpu_procs > self.n_gpus_visible: + logger.info( + 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. 
Using requested GPU slots at your own risk!', + self.n_gpu_procs, + self.n_gpus_visible, + ) + # Instantiate different thread pools for non-daemon processes logger.debug( - "[MultiProc] Starting (n_procs=%d, mem_gb=%0.2f, cwd=%s)", + "[MultiProc] Starting (n_procs=%d, n_gpu_procs=%d, mem_gb=%0.2f, cwd=%s)", self.processors, + self.n_gpu_procs, self.memory_gb, self._cwd, ) @@ -184,9 +200,12 @@ def _prerun_check(self, graph): """Check if any node exceeds the available resources""" tasks_mem_gb = [] tasks_num_th = [] + tasks_gpu_th = [] for node in graph.nodes(): tasks_mem_gb.append(node.mem_gb) tasks_num_th.append(node.n_procs) + if node.is_gpu_node(): + tasks_gpu_th.append(node.n_procs) if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( @@ -203,6 +222,10 @@ def _prerun_check(self, graph): ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") + if np.any(np.array(tasks_gpu_th) > self.n_gpu_procs): + logger.warning('Nodes demand more GPU than allowed (%d).', self.n_gpu_procs) + if self.raise_insufficient: + raise RuntimeError('Insufficient GPU resources available for job') def _postrun_check(self): self.pool.shutdown() @@ -213,11 +236,14 @@ def _check_resources(self, running_tasks): """ free_memory_gb = self.memory_gb free_processors = self.processors + free_gpu_slots = self.n_gpu_procs for _, jobid in running_tasks: free_memory_gb -= min(self.procs[jobid].mem_gb, free_memory_gb) free_processors -= min(self.procs[jobid].n_procs, free_processors) + if self.procs[jobid].is_gpu_node(): + free_gpu_slots -= min(self.procs[jobid].n_procs, free_gpu_slots) - return free_memory_gb, free_processors + return free_memory_gb, free_processors, free_gpu_slots def _send_procs_to_workers(self, updatehash=False, graph=None): """ @@ -232,7 +258,9 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + free_memory_gb, free_processors, free_gpu_slots = self._check_resources( + self.pending_tasks + ) stats = ( len(self.pending_tasks), @@ -241,6 +269,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self.memory_gb, free_processors, self.processors, + free_gpu_slots, + self.n_gpu_procs, ) if self._stats != stats: tasks_list_msg = "" @@ -256,13 +286,15 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( "[MultiProc] Running %d tasks, and %d jobs ready. 
Free " - "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d, Free GPU slot:%d/%d.%s", len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, + free_gpu_slots, + self.n_gpu_procs, tasks_list_msg, ) self._stats = stats @@ -304,28 +336,39 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Check requirements of this job next_job_gb = min(self.procs[jobid].mem_gb, self.memory_gb) next_job_th = min(self.procs[jobid].n_procs, self.processors) + next_job_gpu_th = min(self.procs[jobid].n_procs, self.n_gpu_procs) + + is_gpu_node = self.procs[jobid].is_gpu_node() # If node does not fit, skip at this moment - if next_job_th > free_processors or next_job_gb > free_memory_gb: + if ( + next_job_th > free_processors + or next_job_gb > free_memory_gb + or (is_gpu_node and next_job_gpu_th > free_gpu_slots) + ): logger.debug( - "Cannot allocate job %d (%0.2fGB, %d threads).", + "Cannot allocate job %d (%0.2fGB, %d threads, %d GPU slots).", jobid, next_job_gb, next_job_th, + next_job_gpu_th, ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th + if is_gpu_node: + free_gpu_slots -= next_job_gpu_th logger.debug( "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " - "%0.2fGB, %d threads.", + "%0.2fGB, %d threads, %d GPU slots.", self.procs[jobid].fullname, jobid, next_job_gb, next_job_th, free_memory_gb, free_processors, + free_gpu_slots, ) # change job status in appropriate queues @@ -336,8 +379,11 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): if self._local_hash_check(jobid, graph): continue + cached, updated = self.procs[jobid].is_cached() # updatehash and run_without_submitting are also run locally - if updatehash or self.procs[jobid].run_without_submitting: + if (cached and updatehash and not updated) or self.procs[ + jobid + ].run_without_submitting: logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) @@ -352,6 +398,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self._remove_node_dirs() free_memory_gb += next_job_gb free_processors += next_job_th + if is_gpu_node: + free_gpu_slots += next_job_gpu_th # Display stats next loop self._stats = None diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index df56391bae..b9c4a050ab 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via OAR http://oar.imag.fr -""" +"""Parallel workflow execution via OAR http://oar.imag.fr""" import os import stat diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index d967af0bed..01c80efc5a 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os from time import sleep diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 4b245dedb7..0cb925af38 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os import sys diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index 38079e947d..ce8e046f01 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -1,5 +1,4 @@ 
-"""Parallel workflow execution via SGE -""" +"""Parallel workflow execution via SGE""" import os import pwd diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 5cd1c7bfb7..3b33b73dee 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via SGE -""" +"""Parallel workflow execution via SGE""" import os import sys diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index c74ab05a87..05824b016b 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via SLURM -""" +"""Parallel workflow execution via SLURM""" import os import sys diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index 2105204979..16bedaab23 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os import sys diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index 43471a7d64..11acb369e9 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" import numpy as np import scipy.sparse as ssp diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index f7606708c7..b10238ec4a 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -1,8 +1,9 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for workflow callbacks -""" +"""Tests for workflow callbacks.""" +from pathlib import Path from time import sleep +import json import pytest import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe @@ -60,3 +61,51 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): sleep(0.5) # Wait for callback to be called (python 2.7) assert so.statuses == [("f_node", "start"), ("f_node", "exception")] + + +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) +def test_callback_gantt(tmp_path: Path, plugin: str) -> None: + import logging + + from os import path + + from nipype.utils.profiler import log_nodes_cb + from nipype.utils.draw_gantt_chart import generate_gantt_chart + + log_filename = tmp_path / "callback.log" + logger = logging.getLogger("callback") + logger.setLevel(logging.DEBUG) + handler = logging.FileHandler(log_filename) + logger.addHandler(handler) + + # create workflow + wf = pe.Workflow(name="test", base_dir=str(tmp_path)) + f_node = pe.Node( + niu.Function(function=func, input_names=[], output_names=[]), name="f_node" + ) + wf.add_nodes([f_node]) + wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2} + + plugin_args = {"status_callback": log_nodes_cb} + if plugin != "Linear": + plugin_args["n_procs"] = 8 + wf.run(plugin=plugin, plugin_args=plugin_args) + + with open(log_filename, "r") as _f: + loglines = _f.readlines() + + # test missing duration + first_line = json.loads(loglines[0]) + if "duration" in first_line: + del 
first_line["duration"] + loglines[0] = f"{json.dumps(first_line)}\n" + + # test duplicate timestamp warning + loglines.append(loglines[-1]) + + with open(log_filename, "w") as _f: + _f.write("".join(loglines)) + + with pytest.warns(Warning): + generate_gantt_chart(str(log_filename), 1 if plugin == "Linear" else 8) + assert (tmp_path / "callback.log.html").exists() diff --git a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py index 2f35579a40..cd79fbe31c 100644 --- a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Testing module for functions and classes from multiproc.py -""" +"""Testing module for functions and classes from multiproc.py""" # Import packages import os import sys diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 938e1aab9e..484c0d07bc 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -56,6 +56,7 @@ def test_run_multiproc(tmpdir): class InputSpecSingleNode(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") + use_gpu = nib.traits.Bool(False, mandatory=False, desc="boolean for GPU nodes") class OutputSpecSingleNode(nib.TraitedSpec): @@ -117,6 +118,24 @@ def test_no_more_threads_than_specified(tmpdir): pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) +def test_no_more_gpu_threads_than_specified(tmpdir): + tmpdir.chdir() + + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n1.inputs.use_gpu = True + n1.inputs.input1 = 4 + pipe.add_nodes([n1]) + + max_threads = 2 + max_gpu = 1 + with pytest.raises(RuntimeError): + pipe.run( + plugin="MultiProc", + plugin_args={"n_procs": max_threads, 'n_gpu_procs': max_gpu}, + ) + + @pytest.mark.skipif( sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" ) diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py index e21ef42072..e352253dbe 100644 --- a/nipype/pipeline/plugins/tests/test_tools.py +++ b/nipype/pipeline/plugins/tests/test_tools.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" import re from unittest import mock diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index bce3eb82da..7e066b0ea3 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Common graph operations for execution -""" +"""Common graph operations for execution""" import os import getpass from socket import gethostname diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py index 151011bdfc..429848d2f5 100644 --- a/nipype/sphinxext/apidoc/__init__.py +++ b/nipype/sphinxext/apidoc/__init__.py @@ -2,6 +2,9 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Settings for sphinxext.interfaces and connection to sphinx-apidoc.""" import re +from packaging.version import Version + +import 
sphinx from sphinx.ext.napoleon import ( Config as NapoleonConfig, _patch_python_domain, @@ -39,13 +42,24 @@ class Config(NapoleonConfig): """ - _config_values = { - "nipype_skip_classes": ( - ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], - "env", - ), - **NapoleonConfig._config_values, - } + if Version(sphinx.__version__) >= Version("8.2.1"): + _config_values = ( + ( + "nipype_skip_classes", + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], + "env", + frozenset({list[str]}), + ), + *NapoleonConfig._config_values, + ) + else: + _config_values = { + "nipype_skip_classes": ( + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], + "env", + ), + **NapoleonConfig._config_values, + } def setup(app): @@ -82,8 +96,12 @@ def setup(app): app.connect("autodoc-process-docstring", _process_docstring) app.connect("autodoc-skip-member", _skip_member) - for name, (default, rebuild) in Config._config_values.items(): - app.add_config_value(name, default, rebuild) + if Version(sphinx.__version__) >= Version("8.2.1"): + for name, default, rebuild, types in Config._config_values: + app.add_config_value(name, default, rebuild, types=types) + else: + for name, (default, rebuild) in Config._config_values.items(): + app.add_config_value(name, default, rebuild) return {"version": __version__, "parallel_read_safe": True} diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index 9217d54694..c3b1cae638 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Test testing utilities -""" +"""Test testing utilities""" import os import subprocess diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index 71a75a41c7..96a94d6564 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Additional handy utilities for testing -""" +"""Additional handy utilities for testing""" import os import time import shutil diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 3ae4b77246..64a0d793db 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -8,8 +8,10 @@ import random import datetime import simplejson as json +from typing import Union from collections import OrderedDict +from warnings import warn # Pandas try: @@ -66,9 +68,9 @@ def create_event_dict(start_time, nodes_list): finish_delta = (node["finish"] - start_time).total_seconds() # Populate dictionary - if events.get(start_delta) or events.get(finish_delta): + if events.get(start_delta): err_msg = "Event logged twice or events started at exact same time!" 
- raise KeyError(err_msg) + warn(err_msg, category=Warning) events[start_delta] = start_node events[finish_delta] = finish_node @@ -101,15 +103,25 @@ def log_to_dict(logfile): nodes_list = [json.loads(l) for l in lines] - def _convert_string_to_datetime(datestring): - try: + def _convert_string_to_datetime( + datestring: Union[str, datetime.datetime], + ) -> datetime.datetime: + """Convert a date string to a datetime object.""" + if isinstance(datestring, datetime.datetime): + datetime_object = datestring + elif isinstance(datestring, str): + date_format = ( + "%Y-%m-%dT%H:%M:%S.%f%z" + if "+" in datestring + else "%Y-%m-%dT%H:%M:%S.%f" + ) datetime_object: datetime.datetime = datetime.datetime.strptime( - datestring, "%Y-%m-%dT%H:%M:%S.%f" + datestring, date_format ) - return datetime_object - except Exception as _: - pass - return datestring + else: + msg = f"{datestring} is not a string or datetime object." + raise TypeError(msg) + return datetime_object date_object_node_list: list = list() for n in nodes_list: @@ -154,12 +166,18 @@ def calculate_resource_timeseries(events, resource): # Iterate through the events for _, event in sorted(events.items()): if event["event"] == "start": - if resource in event and event[resource] != "Unknown": - all_res += float(event[resource]) + if resource in event: + try: + all_res += float(event[resource]) + except ValueError: + continue current_time = event["start"] elif event["event"] == "finish": - if resource in event and event[resource] != "Unknown": - all_res -= float(event[resource]) + if resource in event: + try: + all_res -= float(event[resource]) + except ValueError: + continue current_time = event["finish"] res[current_time] = all_res @@ -284,7 +302,14 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, co # Left left = 60 for core in range(len(end_times)): - if end_times[core] < node_start: + try: + end_time_condition = end_times[core] < node_start + except TypeError: + # if one has a timezone and one does not + end_time_condition = end_times[core].replace( + tzinfo=None + ) < node_start.replace(tzinfo=None) + if end_time_condition: left += core * 30 end_times[core] = datetime.datetime( node_finish.year, @@ -307,7 +332,7 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, co "offset": offset, "scale_duration": scale_duration, "color": color, - "node_name": node["name"], + "node_name": node.get("name", node.get("id", "")), "node_dur": node["duration"] / 60.0, "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"), "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"), @@ -527,6 +552,25 @@ def generate_gantt_chart( # Read in json-log to get list of node dicts nodes_list = log_to_dict(logfile) + # Only include nodes with timing information, and convert timestamps + # from strings to datetimes + nodes_list = [ + { + k: ( + datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f") + if k in {"start", "finish"} and isinstance(i[k], str) + else i[k] + ) + for k in i + } + for i in nodes_list + if "start" in i and "finish" in i + ] + + for node in nodes_list: + if "duration" not in node: + node["duration"] = (node["finish"] - node["start"]).total_seconds() + # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 52558f59f0..4916cbacef 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 
4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous file manipulation functions -""" +"""Miscellaneous file manipulation functions""" import sys import pickle import errno diff --git a/nipype/utils/gpu_count.py b/nipype/utils/gpu_count.py new file mode 100644 index 0000000000..70eb6d724e --- /dev/null +++ b/nipype/utils/gpu_count.py @@ -0,0 +1,46 @@ +# -*- DISCLAIMER: this file contains code derived from gputil (https://github.com/anderskm/gputil) +# and therefore is distributed under the following license: +# +# MIT License +# +# Copyright (c) 2017 anderskm +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import platform +import shutil +import subprocess +import os + + +def gpu_count(): + nvidia_smi = shutil.which('nvidia-smi') + if nvidia_smi is None and platform.system() == "Windows": + nvidia_smi = f'{os.environ["systemdrive"]}\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe' + if nvidia_smi is None: + return 0 + try: + p = subprocess.run( + [nvidia_smi, "--query-gpu=name", "--format=csv,noheader,nounits"], + stdout=subprocess.PIPE, + text=True, + ) + except (OSError, UnicodeDecodeError): + return 0 + return len(p.stdout.splitlines()) diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index ea06cd4126..d871885c06 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Useful Functions for working with matlab""" +"""Useful Functions for working with matlab""" # Stdlib imports import os diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index ed8a539e66..3f76fbab3c 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous utility functions -""" +"""Miscellaneous utility functions""" import os import sys import re diff --git a/nipype/utils/subprocess.py b/nipype/utils/subprocess.py index acd6b63256..2fa9e52c3b 100644 --- a/nipype/utils/subprocess.py +++ b/nipype/utils/subprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous utility functions -""" +"""Miscellaneous utility functions""" import os import sys import gc diff --git a/pyproject.toml b/pyproject.toml index 06f4d798c7..2b1282eb74 100644 ---
a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,24 @@ build-backend = "setuptools.build_meta" [tool.black] skip-string-normalization = true + +[tool.pytest.ini_options] +minversion = "6" +testpaths = ["nipype"] +log_cli_level = "INFO" +xfail_strict = true +norecursedirs = [".git"] +addopts = [ + "-svx", + "-ra", + "--strict-config", + "--strict-markers", + "--doctest-modules", + "--cov=nipype", + "--cov-report=xml", + "--cov-config=pyproject.toml", +] +doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" +env = "PYTHONHASHSEED=0" +filterwarnings = ["ignore::DeprecationWarning"] +junit_family = "xunit2" diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000000..571b93628b --- /dev/null +++ b/tox.ini @@ -0,0 +1,108 @@ +[tox] +requires = + tox>=4 +envlist = + py3{9,10,11,12,13}-none # Test nipype functionality on all versions + py3{9,12,13}-full # Test with extra dependencies on oldest and two newest + py39-min # Test with minimal dependencies + py3{11,12,13}-pre # Test with pre-release on SPEC0-supported Python +skip_missing_interpreters = true + +# Configuration that allows us to split tests across GitHub runners effectively +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + 3.12: py312 + 3.13: py313 + +[gh-actions:env] +DEPENDS = + min: min + none: none + full: full + pre: pre + +[testenv] +description = Pytest with coverage +labels = test +pip_pre = + pre: true +pass_env = + # Parsed from `git grep getenv` and `git grep os.environ` + # May not all be needed + NIPYPE_NO_ET + NO_ET + ANTSPATH + CI_SKIP_TEST + FREESURFER_HOME + USER + FSLDIR + FSLOUTPUTTYPE + FSL_COURSE_DATA + NIPYPE_NO_MATLAB + OMP_NUM_THREADS + NIPYPE_NO_R + SPMMCRCMD + FORCE_SPMMCR + LOGNAME + AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + MATLABCMD + MRTRIX3_HOME + RCMD + ETS_TOOLKIT + NIPYPE_CONFIG_DIR + DISPLAY + PATHEXT + # getpass.getuser() sources for Windows: + LOGNAME + USER + LNAME + USERNAME + # Pass user color preferences through + PY_COLORS + FORCE_COLOR + NO_COLOR + CLICOLOR + CLICOLOR_FORCE + PYTHON_GIL +extras = + tests + full: doc + full: profiler + full: duecredit + full: ssh + full: nipy +setenv = + FSLOUTPUTTYPE=NIFTI_GZ + pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + pre: UV_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple +uv_resolution = + min: lowest-direct + +commands = + python -c "import nipype; print(nipype.__version__)" + pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto} + +[testenv:specs] +description = Rebuild spec tests +deps = + black + # Rebuild dipy specs + dipy + # Faster to install old numpy than unreleased Dipy + # This can be dropped once a Dipy release supports numpy 2 + numpy<2 +commands = + python tools/checkspecs.py + +[testenv:style] +description = Check our style guide +labels = check +deps = + black +skip_install = true +commands = + black --check --diff nipype setup.py
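
The net effect of the MultiProc changes above is a second resource pool: a node whose inputs expose a truthy use_gpu or use_cuda trait is counted against n_gpu_procs GPU slots (defaulting to gpu_count() when the plugin argument is omitted), on top of the usual n_procs and memory_gb accounting. The sketch below is illustrative only and assumes a nipype with this patch applied; GPUDemoInterface is a hypothetical stand-in for any interface whose input spec defines such a trait.

import nipype.pipeline.engine as pe
from nipype.interfaces.base import BaseInterface, BaseInterfaceInputSpec, traits


class GPUDemoInputSpec(BaseInterfaceInputSpec):
    use_gpu = traits.Bool(False, usedefault=True, desc="treat this node as GPU-bound")


class GPUDemoInterface(BaseInterface):
    """Hypothetical interface; only the use_gpu trait matters for scheduling."""

    input_spec = GPUDemoInputSpec

    def _run_interface(self, runtime):
        # A real interface would launch its CUDA/GPU work here.
        return runtime


wf = pe.Workflow(name="gpu_demo", base_dir=".")
nodes = []
for i in range(4):
    node = pe.Node(GPUDemoInterface(), name="gpu_task%d" % i, n_procs=1)
    node.inputs.use_gpu = True  # Node.is_gpu_node() now returns True
    nodes.append(node)
wf.add_nodes(nodes)

# Up to 8 regular processes, but at most 2 GPU-flagged nodes at a time.
# Requesting more GPU slots than gpu_count() reports only logs a warning;
# a single node demanding more than n_gpu_procs raises at _prerun_check.
wf.run(plugin="MultiProc", plugin_args={"n_procs": 8, "n_gpu_procs": 2})

Note that a GPU node's n_procs is charged against both pools, so a node with n_procs=2 occupies two CPU slots and two GPU slots while it runs.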