Skip to content

Commit fe23a0c

Browse files
committed
Update unit tests
1 parent 322434f commit fe23a0c

File tree

7 files changed

+17
-14
lines changed

7 files changed

+17
-14
lines changed

sklearn/kernel_approximation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -291,7 +291,7 @@ class AdditiveChi2Sampler(BaseEstimator, TransformerMixin):
291291
power_t=0.5, random_state=0, shuffle=True, tol=0.001,
292292
validation_fraction=0.1, verbose=0, warm_start=False)
293293
>>> clf.score(X_transformed, y) # doctest: +ELLIPSIS
294-
0.9543...
294+
0.9499...
295295
296296
Notes
297297
-----

sklearn/linear_model/passive_aggressive.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,9 +148,9 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
148148
random_state=0, shuffle=True, tol=0.001,
149149
validation_fraction=0.1, verbose=0, warm_start=False)
150150
>>> print(clf.coef_)
151-
[[-0.6543424 1.54603022 1.35361642 0.22199435]]
151+
[[0.26642044 0.45070924 0.67251877 0.64185414]]
152152
>>> print(clf.intercept_)
153-
[0.63310933]
153+
[1.84127814]
154154
>>> print(clf.predict([[0, 0, 0, 0]]))
155155
[1]
156156

sklearn/linear_model/perceptron.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ class Perceptron(BaseSGDClassifier):
137137
n_jobs=None, penalty=None, random_state=0, shuffle=True, tol=0.001,
138138
validation_fraction=0.1, verbose=0, warm_start=False)
139139
>>> clf.score(X, y) # doctest: +ELLIPSIS
140-
0.946...
140+
0.939...
141141
142142
See also
143143
--------

sklearn/linear_model/tests/test_logistic.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1671,14 +1671,14 @@ def test_elastic_net_versus_sgd(C, l1_ratio):
16711671
n_samples = 500
16721672
X, y = make_classification(n_samples=n_samples, n_classes=2, n_features=5,
16731673
n_informative=5, n_redundant=0, n_repeated=0,
1674-
random_state=0)
1674+
random_state=1)
16751675
X = scale(X)
16761676

16771677
sgd = SGDClassifier(
1678-
penalty='elasticnet', random_state=0, fit_intercept=False, tol=-np.inf,
1678+
penalty='elasticnet', random_state=1, fit_intercept=False, tol=-np.inf,
16791679
max_iter=2000, l1_ratio=l1_ratio, alpha=1. / C / n_samples, loss='log')
16801680
log = LogisticRegression(
1681-
penalty='elasticnet', random_state=0, fit_intercept=False, tol=1e-5,
1681+
penalty='elasticnet', random_state=1, fit_intercept=False, tol=1e-5,
16821682
max_iter=1000, l1_ratio=l1_ratio, C=C, solver='saga')
16831683

16841684
sgd.fit(X, y)

sklearn/linear_model/tests/test_passive_aggressive.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ def test_classifier_accuracy():
7676
for average in (False, True):
7777
clf = PassiveAggressiveClassifier(
7878
C=1.0, max_iter=30, fit_intercept=fit_intercept,
79-
random_state=0, average=average, tol=None)
79+
random_state=1, average=average, tol=None)
8080
clf.fit(data, y)
8181
score = clf.score(data, y)
8282
assert_greater(score, 0.79)

sklearn/linear_model/tests/test_sgd.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1032,22 +1032,25 @@ def test_partial_fit_equal_fit_classif(klass, lr):
10321032

10331033
@pytest.mark.parametrize('klass', [SGDClassifier, SparseSGDClassifier])
10341034
def test_regression_losses(klass):
1035+
random_state = np.random.RandomState(1)
10351036
clf = klass(alpha=0.01, learning_rate="constant",
1036-
eta0=0.1, loss="epsilon_insensitive")
1037+
eta0=0.1, loss="epsilon_insensitive",
1038+
random_state=random_state)
10371039
clf.fit(X, Y)
10381040
assert_equal(1.0, np.mean(clf.predict(X) == Y))
10391041

10401042
clf = klass(alpha=0.01, learning_rate="constant",
1041-
eta0=0.1, loss="squared_epsilon_insensitive")
1043+
eta0=0.1, loss="squared_epsilon_insensitive",
1044+
random_state=random_state)
10421045
clf.fit(X, Y)
10431046
assert_equal(1.0, np.mean(clf.predict(X) == Y))
10441047

1045-
clf = klass(alpha=0.01, loss="huber")
1048+
clf = klass(alpha=0.01, loss="huber", random_state=random_state)
10461049
clf.fit(X, Y)
10471050
assert_equal(1.0, np.mean(clf.predict(X) == Y))
10481051

10491052
clf = klass(alpha=0.01, learning_rate="constant", eta0=0.01,
1050-
loss="squared_loss")
1053+
loss="squared_loss", random_state=random_state)
10511054
clf.fit(X, Y)
10521055
assert_equal(1.0, np.mean(clf.predict(X) == Y))
10531056

sklearn/tests/test_multioutput.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -332,14 +332,14 @@ def test_multi_output_classification_partial_fit_sample_weights():
332332
Xw = [[1, 2, 3], [4, 5, 6], [1.5, 2.5, 3.5]]
333333
yw = [[3, 2], [2, 3], [3, 2]]
334334
w = np.asarray([2., 1., 1.])
335-
sgd_linear_clf = SGDClassifier(random_state=1, max_iter=5)
335+
sgd_linear_clf = SGDClassifier(random_state=1, max_iter=20)
336336
clf_w = MultiOutputClassifier(sgd_linear_clf)
337337
clf_w.fit(Xw, yw, w)
338338

339339
# unweighted, but with repeated samples
340340
X = [[1, 2, 3], [1, 2, 3], [4, 5, 6], [1.5, 2.5, 3.5]]
341341
y = [[3, 2], [3, 2], [2, 3], [3, 2]]
342-
sgd_linear_clf = SGDClassifier(random_state=1, max_iter=5)
342+
sgd_linear_clf = SGDClassifier(random_state=1, max_iter=20)
343343
clf = MultiOutputClassifier(sgd_linear_clf)
344344
clf.fit(X, y)
345345
X_test = [[1.5, 2.5, 3.5]]

0 commit comments

Comments
 (0)