Skip to content

FIX Change MRO for some estimators #17837

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into the base branch from the contributor's branch
Jul 5, 2020
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions sklearn/calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,9 @@
from .utils.validation import _deprecate_positional_args


class CalibratedClassifierCV(BaseEstimator, ClassifierMixin,
MetaEstimatorMixin):
class CalibratedClassifierCV(ClassifierMixin,
MetaEstimatorMixin,
BaseEstimator):
"""Probability calibration with isotonic regression or logistic regression.

This class uses cross-validation to both estimate the parameters of a
Expand Down
5 changes: 3 additions & 2 deletions sklearn/discriminant_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,9 @@ def _class_cov(X, y, priors, shrinkage=None):
return cov


class LinearDiscriminantAnalysis(BaseEstimator, LinearClassifierMixin,
TransformerMixin):
class LinearDiscriminantAnalysis(LinearClassifierMixin,
TransformerMixin,
BaseEstimator):
"""Linear Discriminant Analysis

A classifier with a linear decision boundary, generated by fitting class
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -392,7 +392,7 @@ def test_missing_values_minmax_imputation():
# The implementation of MIA as an imputation transformer was suggested by
# "Remark 3" in https://arxiv.org/abs/1902.06931

class MinMaxImputer(BaseEstimator, TransformerMixin):
class MinMaxImputer(TransformerMixin, BaseEstimator):

def fit(self, X, y=None):
mm = MinMaxScaler().fit(X)
Expand Down
4 changes: 2 additions & 2 deletions sklearn/ensemble/tests/test_stacking.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ def test_stacking_classifier_drop_binary_prob():
assert X_meta.shape[1] == 2


class NoWeightRegressor(BaseEstimator, RegressorMixin):
class NoWeightRegressor(RegressorMixin, BaseEstimator):
def fit(self, X, y):
self.reg = DummyRegressor()
return self.reg.fit(X, y)
Expand All @@ -271,7 +271,7 @@ def predict(self, X):
return np.ones(X.shape[0])


class NoWeightClassifier(BaseEstimator, ClassifierMixin):
class NoWeightClassifier(ClassifierMixin, BaseEstimator):
def fit(self, X, y):
self.clf = DummyClassifier(strategy='stratified')
return self.clf.fit(X, y)
Expand Down
10 changes: 6 additions & 4 deletions sklearn/linear_model/_logistic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1008,8 +1008,9 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
return coefs, Cs, np.array(scores), n_iter


class LogisticRegression(BaseEstimator, LinearClassifierMixin,
SparseCoefMixin):
class LogisticRegression(LinearClassifierMixin,
SparseCoefMixin,
BaseEstimator):
"""
Logistic Regression (aka logit, MaxEnt) classifier.

Expand Down Expand Up @@ -1498,8 +1499,9 @@ def predict_log_proba(self, X):
return np.log(self.predict_proba(X))


class LogisticRegressionCV(LogisticRegression, BaseEstimator,
LinearClassifierMixin):
class LogisticRegressionCV(LogisticRegression,
LinearClassifierMixin,
BaseEstimator):
"""Logistic Regression CV (aka logit, MaxEnt) classifier.

See glossary entry for :term:`cross-validation estimator`.
Expand Down
2 changes: 1 addition & 1 deletion sklearn/metrics/_plot/tests/test_plot_precision_recall.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def test_errors(pyplot):
def test_error_bad_response(pyplot, response_method, msg):
X, y = make_classification(n_classes=2, n_samples=50, random_state=0)

class MyClassifier(BaseEstimator, ClassifierMixin):
class MyClassifier(ClassifierMixin, BaseEstimator):
def fit(self, X, y):
self.fitted_ = True
self.classes_ = [0, 1]
Expand Down
2 changes: 1 addition & 1 deletion sklearn/model_selection/tests/test_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -1821,7 +1821,7 @@ def test_scalar_fit_param(SearchCV, param_search):
# unofficially sanctioned tolerance for scalar values in fit_params
# non-regression test for:
# https://github.com/scikit-learn/scikit-learn/issues/15805
class TestEstimator(BaseEstimator, ClassifierMixin):
class TestEstimator(ClassifierMixin, BaseEstimator):
def __init__(self, a=None):
self.a = a

Expand Down
3 changes: 2 additions & 1 deletion sklearn/multioutput.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,8 @@ def _partial_fit_estimator(estimator, X, y, classes=None, sample_weight=None,
return estimator


class _MultiOutputEstimator(BaseEstimator, MetaEstimatorMixin,
class _MultiOutputEstimator(MetaEstimatorMixin,
BaseEstimator,
metaclass=ABCMeta):
@abstractmethod
@_deprecate_positional_args
Expand Down
5 changes: 3 additions & 2 deletions sklearn/svm/_classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,9 @@
from ..utils.deprecation import deprecated


class LinearSVC(BaseEstimator, LinearClassifierMixin,
SparseCoefMixin):
class LinearSVC(LinearClassifierMixin,
SparseCoefMixin,
BaseEstimator):
"""Linear Support Vector Classification.

Similar to SVC with parameter kernel='linear', but implemented in terms of
Expand Down