Drop support for python 2 and python 3.5 #291

Merged: 5 commits, May 27, 2020
15 changes: 2 additions & 13 deletions .travis.yml
@@ -7,23 +7,12 @@ env:
     - SKGGM_VERSION=a0ed406586c4364ea3297a658f415e13b5cbdaf8
 matrix:
   include:
-    - name: "Pytest python 2.7 with skggm"
-      python: "2.7"
+    - name: "Pytest python 3.6 without skggm"
+      python: "3.6"
       before_install:
         - sudo apt-get install liblapack-dev
         - pip install --upgrade pip pytest
         - pip install wheel cython numpy scipy codecov pytest-cov scikit-learn
         - pip install git+https://github.com/skggm/skggm.git@${SKGGM_VERSION};
       script:
         - pytest test --cov;
       after_success:
         - bash <(curl -s https://codecov.io/bash)
-    - name: "Pytest python 3.4 without skggm"
-      python: "3.4"
-      before_install:
-        - sudo apt-get install liblapack-dev
-        - pip install --upgrade pip "pytest<5"
-        - pip install wheel cython numpy scipy codecov pytest-cov scikit-learn
-      script:
-        - pytest test --cov;
-      after_success:
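The two CI job names above distinguish runs with and without the optional skggm solver. For readers wiring up similar tests locally, here is a hypothetical sketch of how a test suite can skip skggm-dependent tests when the package is absent (the test name is illustrative; `inverse_covariance` is skggm's import name):

```python
# Hypothetical sketch, not part of this diff: skip skggm-dependent tests
# when the optional package is missing, mirroring the CI job split above.
import pytest

try:
    from inverse_covariance import quic  # skggm exposes this solver
    HAS_SKGGM = True
except ImportError:
    HAS_SKGGM = False


@pytest.mark.skipif(not HAS_SKGGM, reason='skggm is not installed')
def test_runs_only_with_skggm():
    assert HAS_SKGGM
```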
2 changes: 1 addition & 1 deletion README.rst
@@ -19,7 +19,7 @@ metric-learn contains efficient Python implementations of several popular superv

 **Dependencies**
 
-- Python 2.7+, 3.4+
+- Python 3.6+
 - numpy, scipy, scikit-learn>=0.20.3
 
 **Optional dependencies**
2 changes: 0 additions & 2 deletions metric_learn/__init__.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .constraints import Constraints
 from .covariance import Covariance
 from .itml import ITML, ITML_Supervised
3 changes: 1 addition & 2 deletions metric_learn/_util.py
@@ -1,5 +1,4 @@
 import numpy as np
-import six
 from numpy.linalg import LinAlgError
 from sklearn.datasets import make_spd_matrix
 from sklearn.decomposition import PCA
@@ -283,7 +282,7 @@ def make_name(estimator):
      if a string is given
   """
   if estimator is not None:
-    if isinstance(estimator, six.string_types):
+    if isinstance(estimator, str):
       estimator_name = estimator
     else:
       estimator_name = estimator.__class__.__name__
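For context on the hunk above: `six.string_types` was needed on Python 2, where text could be either `str` or `unicode`; Python 3 has a single text type, so a plain `str` check is equivalent. A minimal runnable sketch of the simplified pattern (the return-based body below is a condensed version of the real `make_name`):

```python
# Condensed sketch of metric_learn._util.make_name after the change:
# isinstance(x, str) now covers everything six.string_types matched.
def make_name(estimator):
    """Return a display name for an estimator given as a string or object."""
    if estimator is None:
        return None
    if isinstance(estimator, str):  # was: isinstance(estimator, six.string_types)
        return estimator
    return estimator.__class__.__name__


print(make_name('NCA'))      # -> NCA
print(make_name(object()))   # -> object
```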
9 changes: 4 additions & 5 deletions metric_learn/base_metric.py
@@ -8,12 +8,11 @@
 from sklearn.metrics import roc_auc_score, roc_curve, precision_recall_curve
 import numpy as np
 from abc import ABCMeta, abstractmethod
-import six
 from ._util import ArrayIndexer, check_input, validate_vector
 import warnings
 
 
-class BaseMetricLearner(six.with_metaclass(ABCMeta, BaseEstimator)):
+class BaseMetricLearner(BaseEstimator, metaclass=ABCMeta):
   """
   Base class for all metric-learners.
 
@@ -145,7 +144,7 @@ def get_metric(self):
"""


class MetricTransformer(six.with_metaclass(ABCMeta)):
class MetricTransformer(metaclass=ABCMeta):

@abstractmethod
def transform(self, X):
@@ -163,8 +162,8 @@ def transform(self, X):
"""


class MahalanobisMixin(six.with_metaclass(ABCMeta, BaseMetricLearner,
MetricTransformer)):
class MahalanobisMixin(BaseMetricLearner, MetricTransformer,
metaclass=ABCMeta):
r"""Mahalanobis metric learning algorithms.

Algorithm that learns a Mahalanobis (pseudo) distance :math:`d_M(x, x')`,
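`six.with_metaclass` manufactured a temporary intermediate class so that one class statement worked under both Python 2 and Python 3 metaclass syntaxes; Python 3 simply accepts the metaclass as a keyword argument, as the three rewrites above show. A stripped-down sketch of the equivalence (the scikit-learn `BaseEstimator` base is omitted and the abstract method is reduced to a stub):

```python
from abc import ABCMeta, abstractmethod


# Python 3-only spelling, as adopted in this PR:
class BaseMetricLearner(metaclass=ABCMeta):
    @abstractmethod
    def get_metric(self):
        """Subclasses must return the learned metric function."""


# ABCMeta still blocks direct instantiation, exactly as it did through
# six.with_metaclass on Python 2:
try:
    BaseMetricLearner()
except TypeError as err:
    print(err)  # can't instantiate abstract class BaseMetricLearner ...
```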
3 changes: 1 addition & 2 deletions metric_learn/constraints.py
@@ -4,7 +4,6 @@
"""
import numpy as np
import warnings
from six.moves import xrange
from sklearn.utils import check_random_state
from sklearn.neighbors import NearestNeighbors

@@ -245,7 +244,7 @@ def chunks(self, num_chunks=100, chunk_size=2, random_state=None):
     chunks = -np.ones_like(self.partial_labels, dtype=int)
     uniq, lookup = np.unique(self.partial_labels, return_inverse=True)
     unknown_uniq = np.where(uniq < 0)[0]
-    all_inds = [set(np.where(lookup == c)[0]) for c in xrange(len(uniq))
+    all_inds = [set(np.where(lookup == c)[0]) for c in range(len(uniq))
                 if c not in unknown_uniq]
     max_chunks = int(np.sum([len(s) // chunk_size for s in all_inds]))
     if max_chunks < num_chunks:
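This is the first of many mechanical `xrange` to `range` swaps in this PR. `six.moves.xrange` resolved to Python 2's lazy `xrange`; Python 3's builtin `range` is equally lazy, so none of these loops change behavior or memory use. A quick sketch:

```python
# range objects store only start/stop/step and materialize nothing,
# matching the Python 2 xrange semantics the old code relied on.
r = range(10**12)
print(r[5])         # 5 -- constant-time indexing
print(len(r))       # 1000000000000
print(500 in r)     # True -- O(1) membership test for integers

for c in range(3):  # the loop pattern used throughout this diff
    print(c)
```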
1 change: 0 additions & 1 deletion metric_learn/covariance.py
@@ -2,7 +2,6 @@
 Covariance metric (baseline method)
 """
 
-from __future__ import absolute_import
 import numpy as np
 import scipy
 from sklearn.base import TransformerMixin
4 changes: 1 addition & 3 deletions metric_learn/itml.py
@@ -2,10 +2,8 @@
 Information Theoretic Metric Learning (ITML)
 """
 
-from __future__ import print_function, absolute_import
 import warnings
 import numpy as np
-from six.moves import xrange
 from sklearn.exceptions import ChangedBehaviorWarning
 from sklearn.metrics import pairwise_distances
 from sklearn.utils.validation import check_array
@@ -69,7 +67,7 @@ def _fit(self, pairs, y, bounds=None):
     pos_vv = pos_pairs[:, 0, :] - pos_pairs[:, 1, :]
     neg_vv = neg_pairs[:, 0, :] - neg_pairs[:, 1, :]
 
-    for it in xrange(self.max_iter):
+    for it in range(self.max_iter):
       # update positives
       for i, v in enumerate(pos_vv):
         wtw = v.dot(A).dot(v)  # scalar
4 changes: 1 addition & 3 deletions metric_learn/lfda.py
@@ -1,11 +1,9 @@
"""
Local Fisher Discriminant Analysis (LFDA)
"""
from __future__ import division, absolute_import
import numpy as np
import scipy
import warnings
from six.moves import xrange
from sklearn.metrics import pairwise_distances
from sklearn.base import TransformerMixin

@@ -127,7 +125,7 @@ def fit(self, X, y):
     tSb = np.zeros((d, d))
     tSw = np.zeros((d, d))
 
-    for c in xrange(num_classes):
+    for c in range(num_classes):
       Xc = X[y == c]
       nc = Xc.shape[0]
 
6 changes: 2 additions & 4 deletions metric_learn/lmnn.py
@@ -1,11 +1,9 @@
"""
Large Margin Nearest Neighbor Metric learning (LMNN)
"""
from __future__ import print_function, absolute_import
import numpy as np
import warnings
from collections import Counter
from six.moves import xrange
from sklearn.exceptions import ChangedBehaviorWarning
from sklearn.metrics import euclidean_distances
from sklearn.base import TransformerMixin
@@ -229,7 +227,7 @@ def fit(self, X, y):
"| learning rate")

# main loop
for it in xrange(2, self.max_iter):
for it in range(2, self.max_iter):
# then at each iteration, we try to find a value of L that has better
# objective than the previous L, following the gradient:
while True:
@@ -293,7 +291,7 @@ def _loss_grad(self, X, L, dfG, k, reg, target_neighbors, label_inds):
     # compute the gradient
     total_active = 0
     df = np.zeros((X.shape[1], X.shape[1]))
-    for nn_idx in reversed(xrange(k)):  # note: reverse not useful here
+    for nn_idx in reversed(range(k)):  # note: reverse not useful here
       act1 = g0 < g1[:, nn_idx]
       act2 = g0 < g2[:, nn_idx]
       total_active += act1.sum() + act2.sum()
4 changes: 1 addition & 3 deletions metric_learn/lsml.py
@@ -2,11 +2,9 @@
 Metric Learning from Relative Comparisons by Minimizing Squared Residual (LSML)
 """
 
-from __future__ import print_function, absolute_import, division
 import warnings
 import numpy as np
 import scipy.linalg
-from six.moves import xrange
 from sklearn.base import TransformerMixin
 from sklearn.exceptions import ChangedBehaviorWarning
 
@@ -66,7 +64,7 @@ def _fit(self, quadruplets, weights=None):
     s_best = self._total_loss(M, vab, vcd, prior_inv)
     if self.verbose:
       print('initial loss', s_best)
-    for it in xrange(1, self.max_iter + 1):
+    for it in range(1, self.max_iter + 1):
       grad = self._gradient(M, vab, vcd, prior_inv)
       grad_norm = scipy.linalg.norm(grad)
       if grad_norm < self.tol:
1 change: 0 additions & 1 deletion metric_learn/mlkr.py
@@ -1,7 +1,6 @@
"""
Metric Learning for Kernel Regression (MLKR)
"""
from __future__ import division, print_function
import time
import sys
import warnings
6 changes: 2 additions & 4 deletions metric_learn/mmc.py
@@ -1,8 +1,6 @@
"""Mahalanobis Metric for Clustering (MMC)"""
from __future__ import print_function, absolute_import, division
import warnings
import numpy as np
from six.moves import xrange
from sklearn.base import TransformerMixin
from sklearn.utils.validation import assert_all_finite
from sklearn.exceptions import ChangedBehaviorWarning
@@ -110,12 +108,12 @@ def _fit_full(self, pairs, y):

     A_old = A.copy()
 
-    for cycle in xrange(self.max_iter):
+    for cycle in range(self.max_iter):
 
       # projection of constraints C1 and C2
       satisfy = False
 
-      for it in xrange(self.max_proj):
+      for it in range(self.max_proj):
 
         # First constraint:
         # f(A) = \sum_{i,j \in S} d_ij' A d_ij <= t  (1)
1 change: 0 additions & 1 deletion metric_learn/nca.py
@@ -2,7 +2,6 @@
 Neighborhood Components Analysis (NCA)
 """
 
-from __future__ import absolute_import
 import warnings
 import time
 import sys
4 changes: 1 addition & 3 deletions metric_learn/rca.py
@@ -2,10 +2,8 @@
 Relative Components Analysis (RCA)
 """
 
-from __future__ import absolute_import
 import numpy as np
 import warnings
-from six.moves import xrange
 from sklearn.base import TransformerMixin
 from sklearn.exceptions import ChangedBehaviorWarning
 
@@ -22,7 +20,7 @@ def _chunk_mean_centering(data, chunks):
   # mean on it
   chunk_data = data[chunk_mask].astype(float, copy=False)
   chunk_labels = chunks[chunk_mask]
-  for c in xrange(num_chunks):
+  for c in range(num_chunks):
     mask = chunk_labels == c
     chunk_data[mask] -= chunk_data[mask].mean(axis=0)
 
1 change: 0 additions & 1 deletion metric_learn/sdml.py
@@ -2,7 +2,6 @@
 Sparse High-Dimensional Metric Learning (SDML)
 """
 
-from __future__ import absolute_import
 import warnings
 import numpy as np
 from sklearn.base import TransformerMixin
3 changes: 1 addition & 2 deletions setup.py
@@ -29,7 +29,7 @@
       classifiers=[
           'Development Status :: 4 - Beta',
           'License :: OSI Approved :: MIT License',
-          'Programming Language :: Python',
+          'Programming Language :: Python :: 3',
           'Operating System :: OS Independent',
           'Intended Audience :: Science/Research',
           'Topic :: Scientific/Engineering'
@@ -39,7 +39,6 @@
           'numpy',
           'scipy',
           'scikit-learn',
-          'six'
       ],
       extras_require=dict(
           docs=['sphinx', 'shinx_rtd_theme', 'numpydoc'],
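A common companion to this kind of change is an explicit `python_requires` floor in `setup()`, so that pip refuses to install the package on interpreters that are too old instead of failing at import time. This is a hypothetical sketch rather than part of the diff above; `python_requires` is a standard setuptools argument, but its presence and the exact specifier here are assumptions:

```python
# Hypothetical sketch: declare the minimum interpreter so `pip install`
# fails fast on Python 2 / 3.5 rather than at first import.
from setuptools import setup

setup(
    name='metric-learn',
    python_requires='>=3.6',  # assumed specifier matching the new floor
    install_requires=[
        'numpy',
        'scipy',
        'scikit-learn',  # 'six' is no longer needed
    ],
)
```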
13 changes: 6 additions & 7 deletions test/metric_learn_test.py
@@ -4,7 +4,6 @@
 import numpy as np
 import scipy
 from scipy.optimize import check_grad, approx_fprime
-from six.moves import xrange
 from sklearn.metrics import pairwise_distances, euclidean_distances
 from sklearn.datasets import (load_iris, make_classification, make_regression,
                               make_spd_matrix)
@@ -32,7 +31,7 @@
 def class_separation(X, labels):
   unique_labels, label_inds = np.unique(labels, return_inverse=True)
   ratio = 0
-  for li in xrange(len(unique_labels)):
+  for li in range(len(unique_labels)):
     Xc = X[label_inds == li]
     Xnc = X[label_inds != li]
     ratio += pairwise_distances(Xc).mean() / pairwise_distances(Xc, Xnc).mean()
@@ -385,15 +384,15 @@ def loss_fn(L, X, y, target_neighbors, reg):
     for j in target_neighbors[i]:
       loss += (1 - reg) * np.sum((Lx[i] - Lx[j]) ** 2)
       grad += (1 - reg) * np.outer(Lx[i] - Lx[j], X[i] - X[j])
-      for l in range(X.shape[0]):
-        if y[i] != y[l]:
+      for k in range(X.shape[0]):
+        if y[i] != y[k]:
           hin, active = hinge(1 + np.sum((Lx[i] - Lx[j])**2) -
-                              np.sum((Lx[i] - Lx[l])**2))
+                              np.sum((Lx[i] - Lx[k])**2))
           total_active += active
           if active:
             loss += reg * hin
             grad += (reg * (np.outer(Lx[i] - Lx[j], X[i] - X[j]) -
-                            np.outer(Lx[i] - Lx[l], X[i] - X[l])))
+                            np.outer(Lx[i] - Lx[k], X[i] - X[k])))
   grad = 2 * grad
   return grad, loss, total_active

@@ -521,7 +520,7 @@ def test_toy_ex_lmnn(X, y, loss):
   # storage
   a1 = [None] * k
   a2 = [None] * k
-  for nn_idx in xrange(k):
+  for nn_idx in range(k):
     a1[nn_idx] = np.array([])
     a2[nn_idx] = np.array([])
 