diff --git a/sklearn/calibration.py b/sklearn/calibration.py
index f5d52f98438ac..eef8fe7f8487b 100644
--- a/sklearn/calibration.py
+++ b/sklearn/calibration.py
@@ -32,8 +32,9 @@ from .utils.validation import _deprecate_positional_args
 
 
-class CalibratedClassifierCV(BaseEstimator, ClassifierMixin,
-                             MetaEstimatorMixin):
+class CalibratedClassifierCV(ClassifierMixin,
+                             MetaEstimatorMixin,
+                             BaseEstimator):
     """Probability calibration with isotonic regression or logistic regression.
 
     This class uses cross-validation to both estimate the parameters of a
diff --git a/sklearn/discriminant_analysis.py b/sklearn/discriminant_analysis.py
index 1d89fd08339b9..b4935306fa0fa 100644
--- a/sklearn/discriminant_analysis.py
+++ b/sklearn/discriminant_analysis.py
@@ -128,8 +128,9 @@ def _class_cov(X, y, priors, shrinkage=None):
     return cov
 
 
-class LinearDiscriminantAnalysis(BaseEstimator, LinearClassifierMixin,
-                                 TransformerMixin):
+class LinearDiscriminantAnalysis(LinearClassifierMixin,
+                                 TransformerMixin,
+                                 BaseEstimator):
     """Linear Discriminant Analysis
 
     A classifier with a linear decision boundary, generated by fitting class
diff --git a/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py b/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py
index fe3ce0c617c06..3cbcd4702fe30 100644
--- a/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py
+++ b/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py
@@ -392,7 +392,7 @@ def test_missing_values_minmax_imputation():
 
     # The implementation of MIA as an imputation transformer was suggested by
     # "Remark 3" in https://arxiv.org/abs/1902.06931
-    class MinMaxImputer(BaseEstimator, TransformerMixin):
+    class MinMaxImputer(TransformerMixin, BaseEstimator):
 
         def fit(self, X, y=None):
             mm = MinMaxScaler().fit(X)
diff --git a/sklearn/ensemble/tests/test_stacking.py b/sklearn/ensemble/tests/test_stacking.py
index f8a3f290e96b5..2f8e36a1b3abd 100644
--- a/sklearn/ensemble/tests/test_stacking.py
+++ b/sklearn/ensemble/tests/test_stacking.py
@@ -262,7 +262,7 @@ def test_stacking_classifier_drop_binary_prob():
     assert X_meta.shape[1] == 2
 
 
-class NoWeightRegressor(BaseEstimator, RegressorMixin):
+class NoWeightRegressor(RegressorMixin, BaseEstimator):
     def fit(self, X, y):
         self.reg = DummyRegressor()
         return self.reg.fit(X, y)
@@ -271,7 +271,7 @@ def predict(self, X):
         return np.ones(X.shape[0])
 
 
-class NoWeightClassifier(BaseEstimator, ClassifierMixin):
+class NoWeightClassifier(ClassifierMixin, BaseEstimator):
     def fit(self, X, y):
         self.clf = DummyClassifier(strategy='stratified')
         return self.clf.fit(X, y)
diff --git a/sklearn/linear_model/_logistic.py b/sklearn/linear_model/_logistic.py
index c9451c749aaea..da86a1755c2f1 100644
--- a/sklearn/linear_model/_logistic.py
+++ b/sklearn/linear_model/_logistic.py
@@ -1008,8 +1008,9 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
     return coefs, Cs, np.array(scores), n_iter
 
 
-class LogisticRegression(BaseEstimator, LinearClassifierMixin,
-                         SparseCoefMixin):
+class LogisticRegression(LinearClassifierMixin,
+                         SparseCoefMixin,
+                         BaseEstimator):
     """
     Logistic Regression (aka logit, MaxEnt) classifier.
 
@@ -1498,8 +1499,9 @@ def predict_log_proba(self, X):
         return np.log(self.predict_proba(X))
 
 
-class LogisticRegressionCV(LogisticRegression, BaseEstimator,
-                           LinearClassifierMixin):
+class LogisticRegressionCV(LogisticRegression,
+                           LinearClassifierMixin,
+                           BaseEstimator):
     """Logistic Regression CV (aka logit, MaxEnt) classifier.
 
     See glossary entry for :term:`cross-validation estimator`.
diff --git a/sklearn/metrics/_plot/tests/test_plot_precision_recall.py b/sklearn/metrics/_plot/tests/test_plot_precision_recall.py
index dc7c858723126..c9ea55601094a 100644
--- a/sklearn/metrics/_plot/tests/test_plot_precision_recall.py
+++ b/sklearn/metrics/_plot/tests/test_plot_precision_recall.py
@@ -63,7 +63,7 @@ def test_errors(pyplot):
 def test_error_bad_response(pyplot, response_method, msg):
     X, y = make_classification(n_classes=2, n_samples=50, random_state=0)
 
-    class MyClassifier(BaseEstimator, ClassifierMixin):
+    class MyClassifier(ClassifierMixin, BaseEstimator):
         def fit(self, X, y):
             self.fitted_ = True
             self.classes_ = [0, 1]
diff --git a/sklearn/model_selection/tests/test_search.py b/sklearn/model_selection/tests/test_search.py
index cd01916d28ea9..7c4f5a2ee9b1e 100644
--- a/sklearn/model_selection/tests/test_search.py
+++ b/sklearn/model_selection/tests/test_search.py
@@ -1821,7 +1821,7 @@ def test_scalar_fit_param(SearchCV, param_search):
     # unofficially sanctioned tolerance for scalar values in fit_params
     # non-regression test for:
     # https://github.com/scikit-learn/scikit-learn/issues/15805
-    class TestEstimator(BaseEstimator, ClassifierMixin):
+    class TestEstimator(ClassifierMixin, BaseEstimator):
         def __init__(self, a=None):
             self.a = a
diff --git a/sklearn/multioutput.py b/sklearn/multioutput.py
index b1319e287a3c8..b020e165d35e8 100644
--- a/sklearn/multioutput.py
+++ b/sklearn/multioutput.py
@@ -60,7 +60,8 @@ def _partial_fit_estimator(estimator, X, y, classes=None, sample_weight=None,
     return estimator
 
 
-class _MultiOutputEstimator(BaseEstimator, MetaEstimatorMixin,
+class _MultiOutputEstimator(MetaEstimatorMixin,
+                            BaseEstimator,
                             metaclass=ABCMeta):
     @abstractmethod
     @_deprecate_positional_args
diff --git a/sklearn/svm/_classes.py b/sklearn/svm/_classes.py
index a4b409a16974c..ad3dee1e44ae2 100644
--- a/sklearn/svm/_classes.py
+++ b/sklearn/svm/_classes.py
@@ -10,8 +10,9 @@ from ..utils.deprecation import deprecated
 
 
-class LinearSVC(BaseEstimator, LinearClassifierMixin,
-                SparseCoefMixin):
+class LinearSVC(LinearClassifierMixin,
+                SparseCoefMixin,
+                BaseEstimator):
     """Linear Support Vector Classification.
 
     Similar to SVC with parameter kernel='linear', but implemented in terms of
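
Every hunk makes the same change: the mixins are listed first and BaseEstimator is moved to the last position among the base classes, so the mixins sit earlier in the MRO. The sketch below is illustrative only and is not part of the diff: a hypothetical custom estimator (MostFrequentClassifier is an invented name) written with the same ordering, so that behaviour contributed by ClassifierMixin, such as the accuracy-based score method, is inherited the same way as in the estimators touched above.

    # Illustrative sketch only -- a hypothetical estimator using the
    # "mixins first, BaseEstimator last" ordering applied in this diff.
    import numpy as np
    from sklearn.base import BaseEstimator, ClassifierMixin
    from sklearn.utils.validation import check_X_y, check_array, check_is_fitted


    class MostFrequentClassifier(ClassifierMixin, BaseEstimator):
        """Toy classifier that always predicts the most frequent training label."""

        def fit(self, X, y):
            X, y = check_X_y(X, y)
            # Record the classes seen during fit and the majority class.
            self.classes_, counts = np.unique(y, return_counts=True)
            self.majority_ = self.classes_[np.argmax(counts)]
            return self

        def predict(self, X):
            check_is_fitted(self)
            X = check_array(X)
            # Predict the majority class for every sample.
            return np.full(X.shape[0], self.majority_)

With this layout, score (mean accuracy) comes from ClassifierMixin and get_params/set_params from BaseEstimator, e.g. MostFrequentClassifier().fit(X, y).score(X, y).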