[MRG + 1] Add test for __dict__ #7553
sklearn/utils/estimator_checks.py

@@ -30,6 +30,7 @@
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_dict_equal

from sklearn.base import (clone, ClassifierMixin, RegressorMixin,
@@ -230,6 +231,7 @@ def _yield_all_checks(name, Estimator):
    yield check_fit1d_1feature
    yield check_fit1d_1sample
    yield check_get_params_invariance
    yield check_dict_unchanged


def check_estimator(Estimator):
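Usage note (not part of the diff): once `check_dict_unchanged` is yielded from `_yield_all_checks`, it runs automatically through the common-test entry point. A minimal sketch, assuming the public `check_estimator` API and using `SVC` purely as an example estimator:

```python
# Sketch: check_estimator iterates over every check yielded by
# _yield_all_checks, so the new __dict__ check is exercised for any
# estimator class passed in.
from sklearn.svm import SVC
from sklearn.utils.estimator_checks import check_estimator

check_estimator(SVC)  # now also runs check_dict_unchanged
```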
@@ -409,6 +411,49 @@ def check_dtype_object(name, Estimator):
@ignore_warnings
def check_dict_unchanged(name, Estimator):
    # SpectralCoclustering raises
    # "ValueError: Found array with 0 feature(s) (shape=(23, 0)) while a
    # minimum of 1 is required."
    if name in ['SpectralCoclustering']:
        return
    rnd = np.random.RandomState(0)
    if name in ['RANSACRegressor']:
        X = 3 * rnd.uniform(size=(20, 3))
    else:
        X = 2 * rnd.uniform(size=(20, 3))

    y = X[:, 0].astype(np.int)
    y = multioutput_estimator_convert_y_2d(name, y)
    estimator = Estimator()
    set_testing_parameters(estimator)
    if hasattr(estimator, "n_components"):
Reviewer: Hmm... I wonder if these should be in …

Author: Can't say anything here; I've seen the same part in several places, for example here: https://github.com/kiote/scikit-learn/blob/feature-test-__dict__/sklearn/utils/estimator_checks.py#L443 So it looks like it could be moved there, do you think it should?

Reviewer: yeah, feel free to move it there.
        estimator.n_components = 1

    if hasattr(estimator, "n_clusters"):
        estimator.n_clusters = 1

    if hasattr(estimator, "n_best"):
        estimator.n_best = 1

    set_random_state(estimator, 1)

    # should be just `estimator.fit(X, y)`
    # after merging #6141
    if name in ['SpectralBiclustering']:
Reviewer: SpectralBiclustering doesn't take …

Author: yep! those changes helped (I merged with …)
        estimator.fit(X)
    else:
        estimator.fit(X, y)
    for method in ["predict", "transform", "decision_function",
                   "predict_proba"]:
        if hasattr(estimator, method):
            dict_before = estimator.__dict__.copy()
            getattr(estimator, method)(X)
            assert_dict_equal(estimator.__dict__, dict_before,
                              'Estimator changes __dict__ during %s' % method)

def check_fit2d_predict1d(name, Estimator):
    # check by fitting a 2d array and predicting with a 1d array
    rnd = np.random.RandomState(0)
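For illustration, the new check can also be invoked directly on a single estimator. A minimal sketch, assuming `check_dict_unchanged` is importable from `sklearn.utils.estimator_checks` once this PR is merged, with `LogisticRegression` used purely as an example:

```python
# Sketch: calling the new check by hand. It raises an AssertionError if the
# estimator mutates its __dict__ inside predict / transform /
# decision_function / predict_proba.
from sklearn.linear_model import LogisticRegression
from sklearn.utils.estimator_checks import check_dict_unchanged

check_dict_unchanged("LogisticRegression", LogisticRegression)
```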
Reviewer: We can't use `3 *` in all cases?

Author: If I use `3 *` for all, I get `ValueError: _BinaryGaussianProcessClassifierLaplace supports only binary classification. y contains classes [0 1 2]`, which sounds fair enough. With `2 *` for all I get, on the other side, `ValueError: No inliers found, possible cause is setting residual_threshold (None) too low.` for `RANSACRegressor` :(
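Picking up the earlier review suggestion to move the repeated `hasattr` overrides next to `set_testing_parameters`, a hypothetical sketch of that refactor (the helper name is invented; nothing like it exists in the PR):

```python
# Hypothetical helper consolidating the overrides that several checks repeat:
# shrink components/clusters/n_best so the checks run fast on tiny data.
def _shrink_estimator_sizes(estimator):
    if hasattr(estimator, "n_components"):
        estimator.n_components = 1
    if hasattr(estimator, "n_clusters"):
        estimator.n_clusters = 1
    if hasattr(estimator, "n_best"):
        estimator.n_best = 1
```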