diff --git a/doc/whats_new/v0.23.rst b/doc/whats_new/v0.23.rst
index 3de4c40e109c7..e0efe3217841d 100644
--- a/doc/whats_new/v0.23.rst
+++ b/doc/whats_new/v0.23.rst
@@ -73,6 +73,10 @@ Changelog
   `store_cv_values` is `True`.
   :pr:`15652` by :user:`Jérôme Dockès `.
 
+- |Fix| add `best_score_` attribute to :class:`linear_model.RidgeCV` and
+  :class:`linear_model.RidgeClassifierCV`.
+  :pr:`15653` by :user:`Jérôme Dockès `.
+
 :mod:`sklearn.preprocessing`
 ............................
 
diff --git a/sklearn/linear_model/_ridge.py b/sklearn/linear_model/_ridge.py
index 1c0407066048c..3b54a4eb5b3cb 100644
--- a/sklearn/linear_model/_ridge.py
+++ b/sklearn/linear_model/_ridge.py
@@ -1576,6 +1576,7 @@ def fit(self, X, y, sample_weight=None):
                                   store_cv_values=self.store_cv_values)
             estimator.fit(X, y, sample_weight=sample_weight)
             self.alpha_ = estimator.alpha_
+            self.best_score_ = estimator.best_score_
             if self.store_cv_values:
                 self.cv_values_ = estimator.cv_values_
         else:
@@ -1591,6 +1592,7 @@ def fit(self, X, y, sample_weight=None):
             gs.fit(X, y, sample_weight=sample_weight)
             estimator = gs.best_estimator_
             self.alpha_ = gs.best_estimator_.alpha
+            self.best_score_ = gs.best_score_
 
         self.coef_ = estimator.coef_
         self.intercept_ = estimator.intercept_
@@ -1693,6 +1695,9 @@ class RidgeCV(MultiOutputMixin, RegressorMixin, _BaseRidgeCV):
     alpha_ : float
         Estimated regularization parameter.
 
+    best_score_ : float
+        Mean cross-validated score of the estimator with the best alpha found.
+
     Examples
     --------
     >>> from sklearn.datasets import load_diabetes
@@ -1795,7 +1800,10 @@ class RidgeClassifierCV(LinearClassifierMixin, _BaseRidgeCV):
         ``fit_intercept = False``.
 
     alpha_ : float
-        Estimated regularization parameter
+        Estimated regularization parameter.
+
+    best_score_ : float
+        Mean cross-validated score of the estimator with the best alpha found.
 
     classes_ : array of shape (n_classes,)
         The classes labels.
diff --git a/sklearn/linear_model/tests/test_ridge.py b/sklearn/linear_model/tests/test_ridge.py
index ab45a093500df..a92e830aba66e 100644
--- a/sklearn/linear_model/tests/test_ridge.py
+++ b/sklearn/linear_model/tests/test_ridge.py
@@ -664,17 +664,31 @@ def _test_ridge_cv(filter_):
 
 @pytest.mark.parametrize(
     "ridge, make_dataset",
-    [(RidgeCV(), make_regression),
-     (RidgeClassifierCV(), make_classification)]
+    [(RidgeCV(store_cv_values=False), make_regression),
+     (RidgeClassifierCV(store_cv_values=False), make_classification)]
 )
 def test_ridge_gcv_cv_values_not_stored(ridge, make_dataset):
     # Check that `cv_values_` is not stored when store_cv_values is False
     X, y = make_dataset(n_samples=6, random_state=42)
-    ridge.set_params(store_cv_values=False)
     ridge.fit(X, y)
     assert not hasattr(ridge, "cv_values_")
 
 
+@pytest.mark.parametrize(
+    "ridge, make_dataset",
+    [(RidgeCV(), make_regression),
+     (RidgeClassifierCV(), make_classification)]
+)
+@pytest.mark.parametrize("cv", [None, 3])
+def test_ridge_best_score(ridge, make_dataset, cv):
+    # Check that `best_score_` is stored
+    X, y = make_dataset(n_samples=6, random_state=42)
+    ridge.set_params(store_cv_values=False, cv=cv)
+    ridge.fit(X, y)
+    assert hasattr(ridge, "best_score_")
+    assert isinstance(ridge.best_score_, float)
+
+
 def _test_ridge_diabetes(filter_):
     ridge = Ridge(fit_intercept=False)
     ridge.fit(filter_(X_diabetes), y_diabetes)
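
Usage sketch (reviewer note, not part of the patch): with this change applied, the mean cross-validated score of the selected alpha can be read directly from the fitted estimator, alongside `alpha_`. A minimal doctest-style illustration, assuming the diabetes dataset and an arbitrary alpha grid; only the attribute's presence and type are checked, mirroring the new test:

>>> from sklearn.datasets import load_diabetes
>>> from sklearn.linear_model import RidgeCV
>>> X, y = load_diabetes(return_X_y=True)
>>> reg = RidgeCV(alphas=[1e-3, 1e-2, 1e-1, 1]).fit(X, y)
>>> hasattr(reg, "best_score_")  # mean CV score for the alpha stored in reg.alpha_
True
>>> isinstance(reg.best_score_, float)
True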