MNT: Require Python 3.8+, auto-upgrade syntax #3588

Merged: 5 commits, Jul 5, 2023
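Most of the hunks below apply the same handful of mechanical modernizations enabled by the Python 3.8+ floor: dropping `# -*- coding: utf-8 -*-` cookies and `u""` string prefixes, removing the explicit `object` base class, collapsing `super(Class, self)` to `super()`, replacing `str.format()` with f-strings, writing `IOError` as its `OSError` alias, and merging implicitly concatenated string literals. A minimal before/after sketch of those patterns follows; the class is invented for illustration (it only mirrors idioms such as `NipypeTester` and `CompCor._make_headers` seen in this diff), and the upgrade tool itself is not visible in this excerpt.

```python
class LegacyTester(object):  # before: explicit object base class
    def __init__(self, header="CompCor"):
        super(LegacyTester, self).__init__()  # before: two-argument super()
        self._header = header

    def make_headers(self, num_col):
        # before: str.format() with a zero-padded field
        return ["{}{:02d}".format(self._header, i) for i in range(num_col)]


class Tester:  # after: implicit object base class
    def __init__(self, header="CompCor"):
        super().__init__()  # after: zero-argument super()
        self._header = header

    def make_headers(self, num_col):
        # after: the f-string keeps the same format spec
        return [f"{self._header}{i:02d}" for i in range(num_col)]


expected = ["CompCor00", "CompCor01", "CompCor02"]
assert LegacyTester().make_headers(3) == expected
assert Tester().make_headers(3) == expected
assert IOError is OSError  # alias since Python 3.3, hence the except-clause rewrite
```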
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -91,7 +91,7 @@ jobs:
strategy:
matrix:
os: ['ubuntu-20.04']
python-version: [3.7, 3.8, 3.9, "3.10"]
python-version: [3.8, 3.9, "3.10"]
check: ['test']
pip-flags: ['']
depends: ['REQUIREMENTS']
18 changes: 11 additions & 7 deletions .pre-commit-config.yaml
@@ -1,14 +1,18 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/psf/black
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black
- id: black
- repo: https://github.com/codespell-project/codespell
rev: v2.2.5
hooks:
- id: codespell
6 changes: 2 additions & 4 deletions doc/conf.py
@@ -1,5 +1,3 @@
# emacs: -*- coding: utf-8; mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set fileencoding=utf-8 ft=python sts=4 ts=4 sw=4 et:
#
# nipype documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 20 12:30:18 2009.
@@ -151,8 +149,8 @@
master_doc = "index"

# General information about the project.
project = u"nipype"
copyright = u"2009-21, Neuroimaging in Python team"
project = "nipype"
copyright = "2009-21, Neuroimaging in Python team"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
5 changes: 2 additions & 3 deletions nipype/__init__.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
@@ -26,14 +25,14 @@
import faulthandler

faulthandler.enable()
except (ImportError, IOError) as e:
except (ImportError, OSError) as e:
pass

config = NipypeConfig()
logging = Logging(config)


class NipypeTester(object):
class NipypeTester:
def __call__(self, doctests=True, parallel=False):
try:
import pytest
1 change: 0 additions & 1 deletion nipype/algorithms/__init__.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
45 changes: 20 additions & 25 deletions nipype/algorithms/confounds.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
@@ -150,7 +149,7 @@ class ComputeDVARS(BaseInterface):

def __init__(self, **inputs):
self._results = {}
super(ComputeDVARS, self).__init__(**inputs)
super().__init__(**inputs)

def _gen_fname(self, suffix, ext=None):
fname, in_ext = op.splitext(op.basename(self.inputs.in_file))
@@ -165,7 +164,7 @@ def _gen_fname(self, suffix, ext=None):
if ext.startswith("."):
ext = ext[1:]

return op.abspath("{}_{}.{}".format(fname, suffix, ext))
return op.abspath(f"{fname}_{suffix}.{ext}")

def _run_interface(self, runtime):
dvars = compute_dvars(
@@ -466,12 +465,12 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
"cosine",
False,
usedefault=True,
desc="Detrend time series prior to component " "extraction",
desc="Detrend time series prior to component extraction",
)
use_regress_poly = traits.Bool(
deprecated="0.15.0",
new_name="pre_filter",
desc=("use polynomial regression " "pre-component extraction"),
desc=("use polynomial regression pre-component extraction"),
)
regress_poly_degree = traits.Range(
low=1, value=1, usedefault=True, desc="the degree polynomial to use"
@@ -584,7 +583,7 @@ class CompCor(SimpleInterface):

def __init__(self, *args, **kwargs):
"""exactly the same as compcor except the header"""
super(CompCor, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._header = "CompCor"

def _run_interface(self, runtime):
@@ -713,7 +712,7 @@ def _run_interface(self, runtime):
self.inputs.pre_filter
]
ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0
header = ["{}{:02d}".format(ftype, i) for i in range(ncols)]
header = [f"{ftype}{i:02d}" for i in range(ncols)]
if skip_vols:
old_basis = filter_basis
# nrows defined above
@@ -724,7 +723,7 @@ def _run_interface(self, runtime):
filter_basis[skip_vols:, :ncols] = old_basis
filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)
header.extend(
["NonSteadyStateOutlier{:02d}".format(i) for i in range(skip_vols)]
[f"NonSteadyStateOutlier{i:02d}" for i in range(skip_vols)]
)
np.savetxt(
self._results["pre_filter_file"],
@@ -747,7 +746,7 @@ def _run_interface(self, runtime):
not_retained = np.where(np.logical_not(metadata["retained"]))
components_names[retained] = components_header
components_names[not_retained] = [
"dropped{}".format(i) for i in range(len(not_retained[0]))
f"dropped{i}" for i in range(len(not_retained[0]))
]
with open(self._results["metadata_file"], "w") as f:
f.write("\t".join(["component"] + list(metadata.keys())) + "\n")
@@ -768,7 +767,7 @@ def _make_headers(self, num_col):
if isdefined(self.inputs.header_prefix)
else self._header
)
headers = ["{}{:02d}".format(header, i) for i in range(num_col)]
headers = [f"{header}{i:02d}" for i in range(num_col)]
return headers


@@ -781,7 +780,7 @@ class ACompCor(CompCor):

def __init__(self, *args, **kwargs):
"""exactly the same as compcor except the header"""
super(ACompCor, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._header = "aCompCor"


@@ -807,7 +806,7 @@ class TCompCorInputSpec(CompCorInputSpec):
class TCompCorOutputSpec(CompCorOutputSpec):
# and all the fields in CompCorOutputSpec
high_variance_masks = OutputMultiPath(
File(exists=True), desc=(("voxels exceeding the variance" " threshold"))
File(exists=True), desc=("voxels exceeding the variance threshold")
)


@@ -832,7 +831,7 @@ class TCompCor(CompCor):

def __init__(self, *args, **kwargs):
"""exactly the same as compcor except the header"""
super(TCompCor, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._header = "tCompCor"
self._mask_files = []

@@ -854,10 +853,10 @@ def _process_masks(self, mask_images, timeseries=None):
out_image = nb.Nifti1Image(mask_data, affine=img.affine, header=img.header)

# save mask
mask_file = os.path.abspath("mask_{:03d}.nii.gz".format(i))
mask_file = os.path.abspath(f"mask_{i:03d}.nii.gz")
out_image.to_filename(mask_file)
IFLOGGER.debug(
"tCompcor computed and saved mask of shape %s to " "mask_file %s",
"tCompcor computed and saved mask of shape %s to mask_file %s",
str(mask.shape),
mask_file,
)
@@ -866,7 +865,7 @@ def _process_masks(self, mask_images, timeseries=None):
return out_images

def _list_outputs(self):
outputs = super(TCompCor, self)._list_outputs()
outputs = super()._list_outputs()
outputs["high_variance_masks"] = self._mask_files
return outputs

@@ -1136,7 +1135,7 @@ def plot_confound(tseries, figsize, name, units=None, series_tr=None, normalize=

xlabel = "Frame #"
if series_tr is not None:
xlabel = "Frame # ({} sec TR)".format(series_tr)
xlabel = f"Frame # ({series_tr} sec TR)"
ax.set_xlabel(xlabel)
ylim = ax.get_ylim()

@@ -1280,19 +1279,15 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
mask_index = 0
else:
raise ValueError(
(
"When more than one mask file is provided, "
"one of merge_method or mask_index must be "
"set"
)
"When more than one mask file is provided, "
"one of merge_method or mask_index must be "
"set"
)
if mask_index < len(mask_files):
mask = nb.load(mask_files[mask_index])
return [mask]
raise ValueError(
("mask_index {0} must be less than number of mask " "files {1}").format(
mask_index, len(mask_files)
)
f"mask_index {mask_index} must be less than number of mask files {len(mask_files)}"
)
masks = []
if mask_method == "none":
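Several of the confounds.py hunks above fold parenthesized runs of adjacent string literals (the `desc=` strings and the `ValueError` messages) into a single literal or f-string. A small sketch of why that rewrite is behavior-preserving; the function below is a simplified stand-in for the index check in `combine_mask_files`, not the real implementation.

```python
def check_mask_index(mask_index, n_files):
    """Simplified stand-in for the index check in combine_mask_files."""
    if mask_index >= n_files:
        # Before: ("mask_index {0} must be less than number of mask "
        #          "files {1}").format(mask_index, n_files)
        # Adjacent string literals are concatenated at compile time, so the
        # single f-string below builds the identical message.
        raise ValueError(
            f"mask_index {mask_index} must be less than number of mask files {n_files}"
        )


old_msg = ("mask_index {0} must be less than number of mask " "files {1}").format(3, 2)
new_msg = f"mask_index {3} must be less than number of mask files {2}"
assert old_msg == new_msg
```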
1 change: 0 additions & 1 deletion nipype/algorithms/icc.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import os
from functools import lru_cache
import numpy as np
19 changes: 9 additions & 10 deletions nipype/algorithms/mesh.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
@@ -30,7 +29,7 @@ class TVTKBaseInterface(BaseInterface):
def __init__(self, **inputs):
if VTKInfo.no_tvtk():
raise ImportError("This interface requires tvtk to run.")
super(TVTKBaseInterface, self).__init__(**inputs)
super().__init__(**inputs)


class WarpPointsInputSpec(BaseInterfaceInputSpec):
@@ -92,7 +91,7 @@ def _gen_fname(self, in_file, suffix="generated", ext=None):

if ext[0] == ".":
ext = ext[1:]
return op.abspath("%s_%s.%s" % (fname, suffix, ext))
return op.abspath(f"{fname}_{suffix}.{ext}")

def _run_interface(self, runtime):
import nibabel as nb
@@ -142,12 +141,12 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec):
surface1 = File(
exists=True,
mandatory=True,
desc=("Reference surface (vtk format) to which compute " "distance."),
desc=("Reference surface (vtk format) to which compute distance."),
)
surface2 = File(
exists=True,
mandatory=True,
desc=("Test surface (vtk format) from which compute " "distance."),
desc=("Test surface (vtk format) from which compute distance."),
)
metric = traits.Enum(
"euclidean", "sqeuclidean", usedefault=True, desc="norm used to report distance"
@@ -164,7 +163,7 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec):
out_warp = File(
"surfwarp.vtk",
usedefault=True,
desc="vtk file based on surface1 and warpings mapping it " "to surface2",
desc="vtk file based on surface1 and warpings mapping it to surface2",
)
out_file = File(
"distance.npy",
@@ -177,7 +176,7 @@ class ComputeMeshWarpOutputSpec(TraitedSpec):
distance = traits.Float(desc="computed distance")
out_warp = File(
exists=True,
desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"),
desc=("vtk file with the vertex-wise mapping of surface1 to surface2"),
)
out_file = File(
exists=True, desc="numpy file keeping computed distances and weights"
@@ -309,15 +308,15 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec):
out_warp = File(
"warp_maths.vtk",
usedefault=True,
desc="vtk file based on in_surf and warpings mapping it " "to out_file",
desc="vtk file based on in_surf and warpings mapping it to out_file",
)
out_file = File("warped_surf.vtk", usedefault=True, desc="vtk with surface warped")


class MeshWarpMathsOutputSpec(TraitedSpec):
out_warp = File(
exists=True,
desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"),
desc=("vtk file with the vertex-wise mapping of surface1 to surface2"),
)
out_file = File(exists=True, desc="vtk with surface warped")

@@ -423,7 +422,7 @@ class P2PDistance(ComputeMeshWarp):
"""

def __init__(self, **inputs):
super(P2PDistance, self).__init__(**inputs)
super().__init__(**inputs)
IFLOGGER.warning(
"This interface has been deprecated since 1.0, please "
"use ComputeMeshWarp"
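The mesh.py section above also converts its one `%`-style formatting call (in `_gen_fname`) to an f-string. A reduced sketch of that filename logic, showing the two forms build the same path; the interface plumbing is stripped out, so this is not the real method.

```python
import os.path as op


def gen_fname(in_file, suffix="generated", ext=None):
    """Reduced version of the _gen_fname pattern touched above."""
    fname, in_ext = op.splitext(op.basename(in_file))
    if ext is None:
        ext = in_ext
    if ext.startswith("."):
        ext = ext[1:]
    old_style = op.abspath("%s_%s.%s" % (fname, suffix, ext))  # before
    new_style = op.abspath(f"{fname}_{suffix}.{ext}")          # after
    assert old_style == new_style
    return new_style


print(gen_fname("surface.vtk", suffix="warped"))  # e.g. /abs/path/surface_warped.vtk
```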
1 change: 0 additions & 1 deletion nipype/algorithms/metrics.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""