Skip to content

[MRG + 1] Added a random_state=0 to many make_classification tests #7968

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit on Dec 2, 2016
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 7 additions & 6 deletions sklearn/linear_model/tests/test_logistic.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,7 @@ def test_consistency_path():

def test_liblinear_dual_random_state():
# random_state is relevant for liblinear solver only if dual=True
X, y = make_classification(n_samples=20)
X, y = make_classification(n_samples=20, random_state=0)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

42 would have made @agramfort happier ;)

Copy link
Member

@amueller amueller Dec 2, 2016

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As @chenhe95 is new to the project I think it's ok to allow it. Now he knows better ;)

Thanks for the detective work and fix!

lr1 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15)
lr1.fit(X, y)
lr2 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15)
Expand All @@ -295,7 +295,7 @@ def test_liblinear_dual_random_state():


def test_logistic_loss_and_grad():
X_ref, y = make_classification(n_samples=20)
X_ref, y = make_classification(n_samples=20, random_state=0)
n_features = X_ref.shape[1]

X_sp = X_ref.copy()
Expand Down Expand Up @@ -403,7 +403,8 @@ def test_multinomial_logistic_regression_string_inputs():
# Test with string labels for LogisticRegression(CV)
n_samples, n_features, n_classes = 50, 5, 3
X_ref, y = make_classification(n_samples=n_samples, n_features=n_features,
n_classes=n_classes, n_informative=3)
n_classes=n_classes, n_informative=3,
random_state=0)
y_str = LabelEncoder().fit(['bar', 'baz', 'foo']).inverse_transform(y)
# For numerical labels, let y values be taken from set (-1, 0, 1)
y = np.array(y) - 1
Expand Down Expand Up @@ -745,7 +746,7 @@ def test_multinomial_logistic_regression_with_classweight_auto():
def test_logistic_regression_convergence_warnings():
# Test that warnings are raised if model does not converge

X, y = make_classification(n_samples=20, n_features=20)
X, y = make_classification(n_samples=20, n_features=20, random_state=0)
clf_lib = LogisticRegression(solver='liblinear', max_iter=2, verbose=1)
assert_warns(ConvergenceWarning, clf_lib.fit, X, y)
assert_equal(clf_lib.n_iter_, 2)
Expand Down Expand Up @@ -834,7 +835,7 @@ def test_liblinear_decision_function_zero():
# are zero. This is a test to verify that we do not do the same.
# See Issue: https://github.com/scikit-learn/scikit-learn/issues/3600
# and the PR https://github.com/scikit-learn/scikit-learn/pull/3623
X, y = make_classification(n_samples=5, n_features=5)
X, y = make_classification(n_samples=5, n_features=5, random_state=0)
clf = LogisticRegression(fit_intercept=False)
clf.fit(X, y)

Expand All @@ -846,7 +847,7 @@ def test_liblinear_decision_function_zero():
def test_liblinear_logregcv_sparse():
# Test LogRegCV with solver='liblinear' works for sparse matrices

X, y = make_classification(n_samples=10, n_features=5)
X, y = make_classification(n_samples=10, n_features=5, random_state=0)
clf = LogisticRegressionCV(solver='liblinear')
clf.fit(sparse.csr_matrix(X), y)

Expand Down