diff --git a/.mailmap b/.mailmap
index 6e6ca9a0d6..79ac75ba46 100644
--- a/.mailmap
+++ b/.mailmap
@@ -75,6 +75,7 @@ Franz Liem
 Franz Liem
 Fred Loney
 Gael Varoquaux
+Gal Ben-Zvi
 Gavin Cooper
 Ghislain Vaillant
 Ghislain Vaillant
diff --git a/.zenodo.json b/.zenodo.json
index 5b11234463..f715cad428 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -58,7 +58,7 @@
     },
     {
       "affiliation": "Klinikum rechts der Isar, TUM. ACPySS",
-      "name": "Manh\u00e3es-Savio, Alexandre",
+      "name": "Manhães-Savio, Alexandre",
       "orcid": "0000-0002-6608-6885"
     },
     {
@@ -127,7 +127,7 @@
     },
     {
       "affiliation": "Stanford University",
-      "name": "\u0106iri\u0107 , Rastko",
+      "name": "Ćirić , Rastko",
       "orcid": "0000-0001-6347-7939"
     },
     {
@@ -214,7 +214,7 @@
       "name": "Tungaraza, Rosalia"
     },
     {
-      "affiliation": "Charit\u00e8 Universit\u00e4tsmedizin Berlin, Germany",
+      "affiliation": "Charitè Universitätsmedizin Berlin, Germany",
       "name": "Dell'Orco, Andrea",
       "orcid": "0000-0002-3964-8360"
     },
@@ -277,7 +277,7 @@
     },
     {
       "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.",
-      "name": "Guillon, Je\u0301re\u0301my",
+      "name": "Guillon, Jérémy",
       "orcid": "0000-0002-2672-7510"
     },
     {
@@ -314,7 +314,7 @@
       "orcid": "0000-0001-9130-1092"
     },
     {
-      "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universit\u00e4t Dresden, Dresden, Germany",
+      "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universität Dresden, Dresden, Germany",
       "name": "Bernardoni, Fabio",
       "orcid": "0000-0002-5112-405X"
     },
@@ -324,7 +324,7 @@
       "orcid": "0000-0002-0805-1350"
     },
     {
-      "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universit\u00e4t Dresden, Dresden, Germany",
+      "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universität Dresden, Dresden, Germany",
       "name": "Geisler, Daniel",
       "orcid": "0000-0003-2076-5329"
     },
@@ -332,7 +332,7 @@
       "name": "Salvatore, John"
     },
     {
-      "affiliation": "CNRS LTCI, Telecom ParisTech, Universit\u00e9 Paris-Saclay",
+      "affiliation": "CNRS LTCI, Telecom ParisTech, Université Paris-Saclay",
       "name": "Gramfort, Alexandre",
       "orcid": "0000-0001-9791-4404"
     },
@@ -403,7 +403,7 @@
       "orcid": "0000-0003-4497-861X"
     },
     {
-      "name": "Heinsfeld, Anibal S\u00f3lon",
+      "name": "Heinsfeld, Anibal Sólon",
       "orcid": "0000-0002-2050-0614"
     },
     {
@@ -422,7 +422,7 @@
       "name": "Poldrack, Russell"
     },
     {
-      "affiliation": "1 McGill Centre for Integrative Neuroscience (MCIN), Ludmer Centre for Neuroinformatics and Mental Health, Montreal Neurological Institute (MNI), McGill University, Montr\u00e9al, 3801 University Street, WB-208, H3A 2B4, Qu\u00e9bec, Canada. 2 University of Lyon, CNRS, INSERM, CREATIS., Villeurbanne, 7, avenue Jean Capelle, 69621, France.",
+      "affiliation": "1 McGill Centre for Integrative Neuroscience (MCIN), Ludmer Centre for Neuroinformatics and Mental Health, Montreal Neurological Institute (MNI), McGill University, Montréal, 3801 University Street, WB-208, H3A 2B4, Québec, Canada. 2 University of Lyon, CNRS, INSERM, CREATIS., Villeurbanne, 7, avenue Jean Capelle, 69621, France.",
       "name": "Glatard, Tristan",
       "orcid": "0000-0003-2620-5883"
     },
@@ -441,7 +441,7 @@
     },
     {
       "affiliation": "University College London",
-      "name": "P\u00e9rez-Garc\u00eda, Fernando",
+      "name": "Pérez-García, Fernando",
       "orcid": "0000-0001-9090-3024"
     },
     {
@@ -456,6 +456,11 @@
       "affiliation": "University of Iowa",
       "name": "Welch, David"
     },
+    {
+      "affiliation": "Sagol School of Neuroscience, Tel Aviv University",
+      "name": "Ben-Zvi, Gal",
+      "orcid": "0000-0002-5655-9423"
+    },
     {
       "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences",
       "name": "Contier, Oliver",
@@ -481,7 +486,7 @@
     },
     {
       "affiliation": "Leibniz Institute for Neurobiology",
-      "name": "Stadler, J\u00f6rg",
+      "name": "Stadler, Jörg",
       "orcid": "0000-0003-4313-129X"
     },
     {
@@ -543,10 +548,6 @@
       "name": "Bielievtsov, Dmytro",
       "orcid": "0000-0003-3846-7696"
     },
-    {
-      "affiliation": "Sagol School of Neuroscience, Tel Aviv University",
-      "name": "Ben-Zvi, Gal"
-    },
     {
       "affiliation": "University of Newcastle, Australia",
       "name": "Cooper, Gavin",
@@ -562,7 +563,7 @@
     },
     {
       "affiliation": "German Institute for International Educational Research",
-      "name": "Linkersd\u00f6rfer, Janosch",
+      "name": "Linkersdörfer, Janosch",
       "orcid": "0000-0002-1577-1233"
     },
     {
@@ -576,7 +577,7 @@
       "name": "Stanley, Olivia"
     },
     {
-      "name": "K\u00fcttner, Ren\u00e9"
+      "name": "Küttner, René"
     },
     {
       "affiliation": "California Institute of Technology",
@@ -729,7 +730,7 @@
       "orcid": "0000-0003-2077-3070"
     },
     {
-      "affiliation": "TIB \u2013 Leibniz Information Centre for Science and Technology and University Library, Hannover, Germany",
+      "affiliation": "TIB – Leibniz Information Centre for Science and Technology and University Library, Hannover, Germany",
       "name": "Leinweber, Katrin",
       "orcid": "0000-0001-5135-5758"
     },
@@ -763,7 +764,7 @@
     },
     {
       "affiliation": "Universidad de Guadalajara",
-      "name": "Gonz\u00e1lez Orozco, Abel A."
+      "name": "González Orozco, Abel A."
     },
     {
       "affiliation": "ARAMIS Lab",
diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst
index a39422a78e..922d06db6f 100644
--- a/doc/changelog/1.X.X-changelog.rst
+++ b/doc/changelog/1.X.X-changelog.rst
@@ -1,3 +1,21 @@
+1.8.3 (July 14, 2022)
+=====================
+
+Bug-fix release in the 1.8.x series.
+
+This release includes compatibility fixes for nibabel 4.x and resolves a denial-of-service
+bug when the etelemetry server is down that resulted in excessive (blocking) network hits that
+would cause any tools using nipype interfaces to take a very long time.
+
+(`Full changelog `__)
+
+  * FIX: Argument order to ``numpy.save()`` (https://github.com/nipy/nipype/pull/3485)
+  * FIX: Add tolerance parameter to ComputeDVARS (https://github.com/nipy/nipype/pull/3489)
+  * FIX: Delay access of nibabel.trackvis until actually needed (https://github.com/nipy/nipype/pull/3488)
+  * FIX: Avoid excessive etelemetry pings (https://github.com/nipy/nipype/pull/3484)
+  * ENH: Added outputs' generation to DWIBiascorrect interface (https://github.com/nipy/nipype/pull/3476)
+
+
 1.8.2 (June 06, 2022)
 =====================
 
diff --git a/doc/interfaces.rst b/doc/interfaces.rst
index 2cb745be0f..bad49381c5 100644
--- a/doc/interfaces.rst
+++ b/doc/interfaces.rst
@@ -8,7 +8,7 @@ Interfaces and Workflows
 :Release: |version|
 :Date: |today|
 
-Previous versions: `1.8.1 `_ `1.8.0 `_
+Previous versions: `1.8.2 `_ `1.8.1 `_
 
 Workflows
 ---------
diff --git a/nipype/__init__.py b/nipype/__init__.py
index bf6968a95a..06084e823a 100644
--- a/nipype/__init__.py
+++ b/nipype/__init__.py
@@ -98,4 +98,4 @@ def check_latest_version(raise_exception=False):
     from .interfaces.base import BaseInterface
 
     if BaseInterface._etelemetry_version_data is None:
-        BaseInterface._etelemetry_version_data = check_latest_version()
+        BaseInterface._etelemetry_version_data = check_latest_version() or "n/a"
diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py
index 82566c07d4..63dc3def2a 100644
--- a/nipype/algorithms/confounds.py
+++ b/nipype/algorithms/confounds.py
@@ -50,6 +50,11 @@ class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
     remove_zerovariance = traits.Bool(
         True, usedefault=True, desc="remove voxels with zero variance"
     )
+    variance_tol = traits.Float(
+        1e-7,
+        usedefault=True,
+        desc="maximum variance to consider \"close to\" zero for the purposes of removal",
+    )
     save_std = traits.Bool(True, usedefault=True, desc="save standardized DVARS")
     save_nstd = traits.Bool(False, usedefault=True, desc="save non-standardized DVARS")
     save_vxstd = traits.Bool(
@@ -167,6 +172,7 @@ def _run_interface(self, runtime):
             self.inputs.in_file,
             self.inputs.in_mask,
             remove_zerovariance=self.inputs.remove_zerovariance,
+            variance_tol=self.inputs.variance_tol,
             intensity_normalization=self.inputs.intensity_normalization,
         )
 
@@ -994,8 +1000,19 @@ def _list_outputs(self):
         return self._results
 
 
+def _AR_est_YW(x, order, rxx=None):
+    """Retrieve AR coefficients while dropping the sig_sq return value"""
+    from nitime.algorithms import AR_est_YW
+
+    return AR_est_YW(x, order, rxx=rxx)[0]
+
+
 def compute_dvars(
-    in_file, in_mask, remove_zerovariance=False, intensity_normalization=1000
+    in_file,
+    in_mask,
+    remove_zerovariance=False,
+    intensity_normalization=1000,
+    variance_tol=0.0,
 ):
     """
     Compute the :abbr:`DVARS (D referring to temporal
@@ -1027,17 +1044,15 @@
     """
     import numpy as np
     import nibabel as nb
-    from nitime.algorithms import AR_est_YW
    import warnings
 
-    func = nb.load(in_file).get_fdata(dtype=np.float32)
-    mask = np.asanyarray(nb.load(in_mask).dataobj).astype(np.uint8)
+    func = np.float32(nb.load(in_file).dataobj)
+    mask = np.bool_(nb.load(in_mask).dataobj)
 
     if len(func.shape) != 4:
         raise RuntimeError("Input fMRI dataset should be 4-dimensional")
 
-    idx = np.where(mask > 0)
-    mfunc = func[idx[0], idx[1], idx[2], :]
+    mfunc = func[mask]
 
     if intensity_normalization != 0:
         mfunc = (mfunc / np.median(mfunc)) * intensity_normalization
@@ -1045,18 +1060,19 @@
     # Robust standard deviation (we are using "lower" interpolation
     # because this is what FSL is doing
     func_sd = (
-        np.percentile(mfunc, 75, axis=1, interpolation="lower")
-        - np.percentile(mfunc, 25, axis=1, interpolation="lower")
+        np.percentile(mfunc, 75, axis=1, method="lower")
+        - np.percentile(mfunc, 25, axis=1, method="lower")
     ) / 1.349
 
     if remove_zerovariance:
-        mfunc = mfunc[func_sd != 0, :]
-        func_sd = func_sd[func_sd != 0]
+        zero_variance_voxels = func_sd > variance_tol
+        mfunc = mfunc[zero_variance_voxels, :]
+        func_sd = func_sd[zero_variance_voxels]
 
     # Compute (non-robust) estimate of lag-1 autocorrelation
     ar1 = np.apply_along_axis(
-        AR_est_YW, 1, regress_poly(0, mfunc, remove_mean=True)[0].astype(np.float32), 1
-    )[:, 0]
+        _AR_est_YW, 1, regress_poly(0, mfunc, remove_mean=True)[0].astype(np.float32), 1
+    )
 
     # Compute (predicted) standard deviation of temporal difference time series
     diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd
diff --git a/nipype/algorithms/tests/test_auto_ComputeDVARS.py b/nipype/algorithms/tests/test_auto_ComputeDVARS.py
index 5fe2d241b9..c5e1118341 100644
--- a/nipype/algorithms/tests/test_auto_ComputeDVARS.py
+++ b/nipype/algorithms/tests/test_auto_ComputeDVARS.py
@@ -43,6 +43,9 @@ def test_ComputeDVARS_inputs():
             usedefault=True,
         ),
         series_tr=dict(),
+        variance_tol=dict(
+            usedefault=True,
+        ),
     )
     inputs = ComputeDVARS.input_spec()
 
diff --git a/nipype/info.py b/nipype/info.py
index 81ff91776e..b4f8373a1b 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@
 
 # nipype version information
 # Remove .dev0 for release
-__version__ = "1.8.2"
+__version__ = "1.8.3"
 
 
 def get_nipype_gitversion():
diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 69d621bbc1..c8099be630 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -186,7 +186,7 @@ def __init__(
             from ... import check_latest_version
 
             if BaseInterface._etelemetry_version_data is None:
-                BaseInterface._etelemetry_version_data = check_latest_version()
+                BaseInterface._etelemetry_version_data = check_latest_version() or "n/a"
 
         if not self.input_spec:
             raise Exception("No input_spec in class: %s" % self.__class__.__name__)
diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py
index 24ebeec040..51d0d7fce5 100644
--- a/nipype/interfaces/fsl/utils.py
+++ b/nipype/interfaces/fsl/utils.py
@@ -2584,7 +2584,7 @@ def _trk_to_coords(self, in_file, out_file=None):
         if out_file is None:
             out_file, _ = op.splitext(in_file)
 
-        np.savetxt(streamlines, out_file + ".txt")
+        np.savetxt(out_file + ".txt", streamlines)
         return out_file + ".txt"
 
     def _coords_to_trk(self, points, out_file):
diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py
index d09a388c33..8fa2d3a058 100644
--- a/nipype/interfaces/mrtrix/convert.py
+++ b/nipype/interfaces/mrtrix/convert.py
@@ -3,9 +3,7 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 import os.path as op
 import nibabel as nb
-import nibabel.trackvis as trk
 import numpy as np
-from nibabel.trackvis import HeaderError
 from nibabel.volumeutils import native_code
 from nibabel.orientations import aff2axcodes
 
@@ -120,7 +118,7 @@ def track_gen(track_points):
             nan_str = fileobj.read(bytesize)
             if len(pts_str) < (n_pts * bytesize):
                 if not n_streams == stream_count:
-                    raise HeaderError(
+                    raise nb.trackvis.HeaderError(
                         "Expecting %s points, found only %s" % (stream_count, n_streams)
                     )
                     iflogger.error(
@@ -255,7 +253,7 @@ def _run_interface(self, runtime):
 
             final_streamlines = transform_tracking_output(transformed_streamlines, aff)
             trk_tracks = ((ii, None, None) for ii in final_streamlines)
-            trk.write(out_filename, trk_tracks, trk_header)
+            nb.trackvis.write(out_filename, trk_tracks, trk_header)
             iflogger.info("Saving transformed Trackvis file as %s", out_filename)
             iflogger.info("New TrackVis Header:")
             iflogger.info(trk_header)
@@ -271,7 +269,7 @@ def _run_interface(self, runtime):
                 streamlines, trk_header, affine
             )
             trk_tracks = ((ii, None, None) for ii in transformed_streamlines)
-            trk.write(out_filename, trk_tracks, trk_header)
+            nb.trackvis.write(out_filename, trk_tracks, trk_header)
             iflogger.info("Saving Trackvis file as %s", out_filename)
             iflogger.info("TrackVis Header:")
             iflogger.info(trk_header)
diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py
index ef67365f0b..928833aaf6 100644
--- a/nipype/interfaces/mrtrix3/preprocess.py
+++ b/nipype/interfaces/mrtrix3/preprocess.py
@@ -242,6 +242,13 @@ def _format_arg(self, name, trait_spec, value):
             return f"-{trait_spec.argstr}"
         return super()._format_arg(name, trait_spec, value)
 
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
+        if self.inputs.bias:
+            outputs["bias"] = op.abspath(self.inputs.bias)
+        return outputs
+
 
 class DWIPreprocInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
diff --git a/tools/update_zenodo.py b/tools/update_zenodo.py
index e63c2ed2e7..64eb617644 100755
--- a/tools/update_zenodo.py
+++ b/tools/update_zenodo.py
@@ -69,4 +69,4 @@ def decommify(name):
 
     zenodo["creators"] = creators
 
-    zenodo_file.write_text("%s\n" % json.dumps(zenodo, indent=2))
+    zenodo_file.write_text("%s\n" % json.dumps(zenodo, indent=2, ensure_ascii=False))
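
The etelemetry fix above amounts to caching a sentinel: ``check_latest_version()`` returns ``None`` when the server is unreachable, and appending ``or "n/a"`` stores a truthy value so the blocking network call is never retried on later interface constructions. A minimal sketch of that caching pattern follows (illustration only, not part of the patch; ``fetch_version`` and ``InterfaceSketch`` are hypothetical names, and only the ``or "n/a"`` logic mirrors the actual change):

# Sketch of the "or 'n/a'" caching pattern from nipype/__init__.py and
# nipype/interfaces/base/core.py. fetch_version() is a hypothetical stand-in
# for nipype.check_latest_version(); only the caching logic mirrors the patch.


def fetch_version():
    """Pretend etelemetry query: return version info, or None on any failure."""
    try:
        from etelemetry import get_project

        return get_project("nipy/nipype")
    except Exception:
        return None


class InterfaceSketch:
    _etelemetry_version_data = None  # class-level cache shared by every instance

    def __init__(self):
        if InterfaceSketch._etelemetry_version_data is None:
            # A failed lookup returns None; "or 'n/a'" stores a truthy sentinel,
            # so the (potentially blocking) network call happens at most once.
            InterfaceSketch._etelemetry_version_data = fetch_version() or "n/a"


# Constructing many interfaces now triggers at most one network attempt,
# even when the etelemetry server is down.
for _ in range(100):
    InterfaceSketch()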