
[MRG] Use base.is_classifier instead of isinstance #9482

Merged (1 commit, Aug 8, 2017)
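
Background for the diffs below: `sklearn.base.is_classifier` tests the estimator's `_estimator_type` tag rather than its class ancestry, so it also recognizes classifiers that do not inherit from `ClassifierMixin` (for example wrapped or third-party estimators). A minimal sketch of the difference; the `DuckClassifier` class is hypothetical and only illustrates the duck-typing behavior:

```python
from sklearn.base import ClassifierMixin, is_classifier
from sklearn.tree import DecisionTreeClassifier


class DuckClassifier(object):
    # Hypothetical estimator: carries the classifier tag without
    # inheriting from ClassifierMixin.
    _estimator_type = "classifier"


# isinstance() only sees the class hierarchy...
print(isinstance(DuckClassifier(), ClassifierMixin))  # False
# ...while is_classifier() reads the _estimator_type tag.
print(is_classifier(DuckClassifier()))                # True

# Both agree for estimators that inherit from ClassifierMixin.
print(is_classifier(DecisionTreeClassifier()))        # True
```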
4 changes: 2 additions & 2 deletions sklearn/ensemble/weight_boosting.py

@@ -29,7 +29,7 @@
 from numpy.core.umath_tests import inner1d

 from .base import BaseEnsemble
-from ..base import ClassifierMixin, RegressorMixin, is_regressor
+from ..base import ClassifierMixin, RegressorMixin, is_regressor, is_classifier
 from ..externals import six
 from ..externals.six.moves import zip
 from ..externals.six.moves import xrange as range
@@ -231,7 +231,7 @@ def staged_score(self, X, y, sample_weight=None):
         z : float
         """
         for y_pred in self.staged_predict(X):
-            if isinstance(self, ClassifierMixin):
+            if is_classifier(self):
                 yield accuracy_score(y, y_pred, sample_weight=sample_weight)
             else:
                 yield r2_score(y, y_pred, sample_weight=sample_weight)
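
With this change `staged_score` picks its metric from the estimator tag. A usage sketch (dataset and hyperparameters here are arbitrary) showing the classifier variant yielding accuracy after each boosting stage:

```python
from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier

X, y = make_classification(n_samples=200, random_state=0)
clf = AdaBoostClassifier(n_estimators=5, random_state=0).fit(X, y)

# staged_score dispatches on is_classifier(self): AdaBoostClassifier
# yields accuracy per stage, AdaBoostRegressor would yield R^2.
for stage, score in enumerate(clf.staged_score(X, y), start=1):
    print(stage, score)
```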
4 changes: 2 additions & 2 deletions sklearn/multioutput.py

@@ -18,7 +18,7 @@
 import scipy.sparse as sp
 from abc import ABCMeta, abstractmethod
 from .base import BaseEstimator, clone, MetaEstimatorMixin
-from .base import RegressorMixin, ClassifierMixin
+from .base import RegressorMixin, ClassifierMixin, is_classifier
 from .model_selection import cross_val_predict
 from .utils import check_array, check_X_y, check_random_state
 from .utils.fixes import parallel_helper
@@ -152,7 +152,7 @@ def fit(self, X, y, sample_weight=None):
                          multi_output=True,
                          accept_sparse=True)

-        if isinstance(self, ClassifierMixin):
+        if is_classifier(self):
            check_classification_targets(y)

        if y.ndim == 1:
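
In `fit` the same tag decides whether classification targets get validated. A quick check, assuming default `LogisticRegression` settings, that the classifier wrapper itself is tagged as a classifier:

```python
from sklearn.base import is_classifier
from sklearn.datasets import make_multilabel_classification
from sklearn.linear_model import LogisticRegression
from sklearn.multioutput import MultiOutputClassifier

X, Y = make_multilabel_classification(n_samples=100, n_classes=3,
                                      random_state=0)
est = MultiOutputClassifier(LogisticRegression()).fit(X, Y)

# The meta-estimator inherits the classifier tag, so fit() runs
# check_classification_targets(y) on its targets.
print(is_classifier(est))  # True
```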
5 changes: 3 additions & 2 deletions sklearn/neural_network/multilayer_perceptron.py

@@ -13,6 +13,7 @@
 import warnings

 from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
+from ..base import is_classifier
 from ._base import ACTIVATIONS, DERIVATIVES, LOSS_FUNCTIONS
 from ._stochastic_optimizers import SGDOptimizer, AdamOptimizer
 from ..model_selection import train_test_split
@@ -268,7 +269,7 @@ def _initialize(self, y, layer_units):
         self.n_layers_ = len(layer_units)

         # Output for regression
-        if not isinstance(self, ClassifierMixin):
+        if not is_classifier(self):
             self.out_activation_ = 'identity'
         # Output for multi class
         elif self._label_binarizer.y_type_ == 'multiclass':
@@ -491,7 +492,7 @@ def _fit_stochastic(self, X, y, activations, deltas, coef_grads,
             X, X_val, y, y_val = train_test_split(
                 X, y, random_state=self._random_state,
                 test_size=self.validation_fraction)
-            if isinstance(self, ClassifierMixin):
+            if is_classifier(self):
                 y_val = self._label_binarizer.inverse_transform(y_val)
         else:
             X_val = None
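
The MLP hunks use the same test to choose the output activation and to restore validation labels during early stopping. A small check of the resulting `out_activation_` attribute (tiny synthetic data, so expect convergence warnings rather than a useful model):

```python
import numpy as np
from sklearn.neural_network import MLPClassifier, MLPRegressor

rng = np.random.RandomState(0)
X = rng.rand(50, 3)

clf = MLPClassifier(hidden_layer_sizes=(5,), max_iter=50)
clf.fit(X, (X[:, 0] > 0.5).astype(int))
print(clf.out_activation_)  # 'logistic' for binary classification

reg = MLPRegressor(hidden_layer_sizes=(5,), max_iter=50)
reg.fit(X, X.sum(axis=1))
print(reg.out_activation_)  # 'identity', the regression branch above
```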
4 changes: 2 additions & 2 deletions sklearn/tree/tests/test_export.py

@@ -6,7 +6,7 @@

 from numpy.random import RandomState

-from sklearn.base import ClassifierMixin
+from sklearn.base import is_classifier
 from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
 from sklearn.ensemble import GradientBoostingClassifier
 from sklearn.tree import export_graphviz
@@ -292,7 +292,7 @@ def test_precision():
                     len(search("\.\d+", finding.group()).group()),
                     precision + 1)
             # check impurity
-            if isinstance(clf, ClassifierMixin):
+            if is_classifier(clf):
                 pattern = "gini = \d+\.\d+"
             else:
                 pattern = "friedman_mse = \d+\.\d+"
5 changes: 3 additions & 2 deletions sklearn/tree/tree.py

@@ -29,6 +29,7 @@
 from ..base import BaseEstimator
 from ..base import ClassifierMixin
 from ..base import RegressorMixin
+from ..base import is_classifier
 from ..externals import six
 from ..utils import check_array
 from ..utils import check_random_state
@@ -123,7 +124,7 @@ def fit(self, X, y, sample_weight=None, check_input=True,

         # Determine output settings
         n_samples, self.n_features_ = X.shape
-        is_classification = isinstance(self, ClassifierMixin)
+        is_classification = is_classifier(self)

         y = np.atleast_1d(y)
         expanded_class_weight = None
@@ -413,7 +414,7 @@ def predict(self, X, check_input=True):
         n_samples = X.shape[0]

         # Classification
-        if isinstance(self, ClassifierMixin):
+        if is_classifier(self):
             if self.n_outputs_ == 1:
                 return self.classes_.take(np.argmax(proba, axis=1), axis=0)
10 changes: 5 additions & 5 deletions sklearn/utils/estimator_checks.py

@@ -35,8 +35,8 @@
 from sklearn.discriminant_analysis import LinearDiscriminantAnalysis


-from sklearn.base import (clone, ClassifierMixin, RegressorMixin,
-                          TransformerMixin, ClusterMixin, BaseEstimator)
+from sklearn.base import (clone, TransformerMixin, ClusterMixin,
+                          BaseEstimator, is_classifier, is_regressor)
 from sklearn.metrics import accuracy_score, adjusted_rand_score, f1_score

 from sklearn.random_projection import BaseRandomProjection
@@ -208,10 +208,10 @@ def _yield_clustering_checks(name, clusterer):
 def _yield_all_checks(name, estimator):
     for check in _yield_non_meta_checks(name, estimator):
         yield check
-    if isinstance(estimator, ClassifierMixin):
+    if is_classifier(estimator):
         for check in _yield_classifier_checks(name, estimator):
             yield check
-    if isinstance(estimator, RegressorMixin):
+    if is_regressor(estimator):
         for check in _yield_regressor_checks(name, estimator):
             yield check
     if isinstance(estimator, TransformerMixin):
@@ -980,7 +980,7 @@ def check_estimators_partial_fit_n_features(name, estimator_orig):
     X -= X.min()

     try:
-        if isinstance(estimator, ClassifierMixin):
+        if is_classifier(estimator):
             classes = np.unique(y)
             estimator.partial_fit(X, y, classes=classes)
         else:
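
This file also switches to `is_regressor`, the symmetric helper that checks for the "regressor" tag. A quick sanity check of both helpers:

```python
from sklearn.base import is_classifier, is_regressor
from sklearn.svm import SVC, SVR

print(is_classifier(SVC()), is_regressor(SVC()))  # True False
print(is_classifier(SVR()), is_regressor(SVR()))  # False True
```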