Skip to content

DRAFT: Implements fit params for RFECV #21113

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 11 additions & 6 deletions sklearn/feature_selection/_rfe.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from ._base import _get_feature_importances


def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer):
def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer, fit_params):
"""
Return the score for a fit across one fold.
"""
Expand All @@ -40,6 +40,7 @@ def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer):
lambda estimator, features: _score(
estimator, X_test[:, features], y_test, scorer
),
**fit_params,
).scores_


Expand Down Expand Up @@ -632,7 +633,7 @@ def __init__(
self.n_jobs = n_jobs
self.min_features_to_select = min_features_to_select

def fit(self, X, y, groups=None):
def fit(self, X, y, groups=None, **fit_params):
"""Fit the RFE model and automatically tune the number of selected features.

Parameters
Expand All @@ -652,6 +653,10 @@ def fit(self, X, y, groups=None):

.. versionadded:: 0.20

**fit_params : dict
Additional parameters passed to the `fit` method of the underlying
estimator.

Returns
-------
self : object
Expand Down Expand Up @@ -700,15 +705,15 @@ def fit(self, X, y, groups=None):
# make sure that user code that sets n_jobs to 1
# and provides bound methods as scorers is not broken with the
# addition of n_jobs parameter in version 0.18.

breakpoint()
if effective_n_jobs(self.n_jobs) == 1:
parallel, func = list, _rfe_single_fit
else:
parallel = Parallel(n_jobs=self.n_jobs)
func = delayed(_rfe_single_fit)

scores = parallel(
func(rfe, self.estimator, X, y, train, test, scorer)
func(rfe, self.estimator, X, y, train, test, scorer, fit_params)
for train, test in cv.split(X, y, groups)
)

Expand All @@ -729,14 +734,14 @@ def fit(self, X, y, groups=None):
verbose=self.verbose,
)

rfe.fit(X, y)
rfe.fit(X, y, **fit_params)

# Set final attributes
self.support_ = rfe.support_
self.n_features_ = rfe.n_features_
self.ranking_ = rfe.ranking_
self.estimator_ = clone(self.estimator)
self.estimator_.fit(self.transform(X), y)
self.estimator_.fit(self.transform(X), y, **fit_params)

# reverse to stay consistent with before
scores_rev = scores[:, ::-1]
Expand Down
25 changes: 25 additions & 0 deletions sklearn/feature_selection/tests/test_rfe.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,31 @@ def score(self, X, y, prop=None):
RFE(estimator=TestEstimator()).fit(X, y, prop="foo").score(X, y, prop="foo")


def test_RFECV_fit_score_params():
    # Make sure RFECV passes the metadata down to fit and score methods of the
    # underlying estimator
    class TestEstimator(BaseEstimator, ClassifierMixin):
        """Dummy classifier requiring a `prop` metadata kwarg in both `fit`
        and `score`, so any call that drops the metadata raises ValueError."""

        def fit(self, X, y, prop=None):
            if prop is None:
                raise ValueError("fit: prop cannot be None")
            self.svc_ = SVC(kernel="linear").fit(X, y)
            self.coef_ = self.svc_.coef_
            return self

        def score(self, X, y, prop=None):
            if prop is None:
                raise ValueError("score: prop cannot be None")
            return self.svc_.score(X, y)

    X, y = load_iris(return_X_y=True)

    # Omitting the metadata must surface the underlying estimator's error,
    # proving the kwargs are actually forwarded (not silently dropped).
    with pytest.raises(ValueError, match="fit: prop cannot be None"):
        RFECV(estimator=TestEstimator()).fit(X, y)
    with pytest.raises(ValueError, match="score: prop cannot be None"):
        RFECV(estimator=TestEstimator()).fit(X, y, prop="foo").score(X, y)

    # Passing the metadata to both fit and score must succeed end to end.
    RFECV(estimator=TestEstimator()).fit(X, y, prop="foo").score(X, y, prop="foo")


@pytest.mark.parametrize("n_features_to_select", [-1, 2.1])
def test_rfe_invalid_n_features_errors(n_features_to_select):
clf = SVC(kernel="linear")
Expand Down
1 change: 1 addition & 0 deletions sklearn/model_selection/_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -751,6 +751,7 @@ def _score(estimator, X_test, y_test, scorer, error_score="raise"):
Will return a dict of floats if `scorer` is a dict, otherwise a single
float is returned.
"""
breakpoint()
if isinstance(scorer, dict):
# will cache method calls if needed. scorer() returns a dict
scorer = _MultimetricScorer(**scorer)
Expand Down