TST/MNT clean up some tests in coordinate descent #31909

Merged (2 commits) on Aug 14, 2025
sklearn/linear_model/tests/test_coordinate_descent.py (69 changes: 5 additions & 64 deletions)
@@ -17,16 +17,12 @@
     ElasticNetCV,
     Lasso,
     LassoCV,
-    LassoLars,
     LassoLarsCV,
     MultiTaskElasticNet,
     MultiTaskElasticNetCV,
     MultiTaskLasso,
     MultiTaskLassoCV,
     Ridge,
-    RidgeClassifier,
-    RidgeClassifierCV,
-    RidgeCV,
     enet_path,
     lars_path,
     lasso_path,
@@ -325,38 +321,6 @@ def test_lassocv_alphas_validation(alphas, err_type, err_msg):
     lassocv.fit(X, y)
 
 
-def _scale_alpha_inplace(estimator, n_samples):
-    """Rescale the parameter alpha from when the estimator is evoked with
-    normalize set to True as if it were evoked in a Pipeline with normalize set
-    to False and with a StandardScaler.
-    """
-    if ("alpha" not in estimator.get_params()) and (
-        "alphas" not in estimator.get_params()
-    ):
-        return
-
-    if isinstance(estimator, (RidgeCV, RidgeClassifierCV)):
-        # alphas is not validated at this point and can be a list.
-        # We convert it to a np.ndarray to make sure broadcasting
-        # is used.
-        alphas = np.asarray(estimator.alphas) * n_samples
-        return estimator.set_params(alphas=alphas)
-    if isinstance(estimator, (Lasso, LassoLars, MultiTaskLasso)):
-        alpha = estimator.alpha * np.sqrt(n_samples)
-    if isinstance(estimator, (Ridge, RidgeClassifier)):
-        alpha = estimator.alpha * n_samples
-    if isinstance(estimator, (ElasticNet, MultiTaskElasticNet)):
-        if estimator.l1_ratio == 1:
-            alpha = estimator.alpha * np.sqrt(n_samples)
-        elif estimator.l1_ratio == 0:
-            alpha = estimator.alpha * n_samples
-        else:
-            # To avoid silent errors in case of refactoring
-            raise NotImplementedError
-
-    estimator.set_params(alpha=alpha)
-
-
 def test_lasso_path_return_models_vs_new_return_gives_same_coefficients():
     # Test that lasso_path with lars_path style output gives the
     # same result
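
Note: the removed `_scale_alpha_inplace` helper was a leftover from the long-deprecated `normalize` parameter; it rescaled `alpha` so that a fit with `normalize=True` matched a `StandardScaler` pipeline. The scaling factors (`n_samples` for quadratic penalties, `sqrt(n_samples)` for L1 penalties) come from the coordinate-descent objective being averaged over samples. A minimal sketch of that per-sample normalization, using only current scikit-learn (the data and tolerance here are illustrative assumptions, not taken from the test file):

import numpy as np
from sklearn.linear_model import Lasso

# Lasso minimizes (1 / (2 * n_samples)) * ||y - Xw||^2 + alpha * ||w||_1,
# so duplicating every sample leaves the optimal coefficients unchanged:
# the data term is averaged over n_samples, making alpha a per-sample penalty.
rng = np.random.RandomState(0)
X = rng.randn(30, 5)
y = X @ np.array([1.0, 2.0, 0.0, 0.0, -1.0])

reg = Lasso(alpha=0.1, fit_intercept=False).fit(X, y)
reg_dup = Lasso(alpha=0.1, fit_intercept=False).fit(
    np.vstack([X, X]), np.hstack([y, y])
)
np.testing.assert_allclose(reg.coef_, reg_dup.coef_, rtol=1e-5)
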
@@ -395,7 +359,7 @@ def test_enet_path():
     clf = ElasticNetCV(
         alphas=[0.01, 0.05, 0.1], eps=2e-3, l1_ratio=[0.5, 0.7], cv=3, max_iter=max_iter
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
     # Well-conditioned settings, we should have selected our
     # smallest penalty
     assert_almost_equal(clf.alpha_, min(clf.alphas_))
@@ -411,7 +375,7 @@ def test_enet_path():
         max_iter=max_iter,
         precompute=True,
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
 
     # Well-conditioned settings, we should have selected our
     # smallest penalty
@@ -429,7 +393,7 @@ def test_enet_path():
     clf = MultiTaskElasticNetCV(
         alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7], cv=3, max_iter=max_iter
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
     # We are in well-conditioned settings with low noise: we should
     # have a good test-set performance
     assert clf.score(X_test, y_test) > 0.99
@@ -446,17 +410,6 @@ def test_enet_path():
     assert_almost_equal(clf1.alpha_, clf2.alpha_)
 
 
-def test_path_parameters():
-    X, y, _, _ = build_dataset()
-    max_iter = 100
-
-    clf = ElasticNetCV(alphas=50, eps=1e-3, max_iter=max_iter, l1_ratio=0.5, tol=1e-3)
-    clf.fit(X, y)  # new params
-    assert_almost_equal(0.5, clf.l1_ratio)
-    assert 50 == clf._alphas
-    assert 50 == len(clf.alphas_)
-
-
 def test_warm_start():
     X, y, _, _ = build_dataset()
     clf = ElasticNet(alpha=0.1, max_iter=5, warm_start=True)
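
Note: the deleted `test_path_parameters` asserted on the private `_alphas` attribute. The public-facing behavior it covered, namely that an integer `alphas` produces a fitted grid of that length in `alphas_`, can still be checked roughly as below (a sketch; `build_dataset` is replaced by an inline toy dataset of my own choosing):

import numpy as np
from sklearn.linear_model import ElasticNetCV

rng = np.random.RandomState(0)
X = rng.randn(50, 10)
y = X @ rng.randn(10)

# An integer `alphas` asks ElasticNetCV to generate a grid of that many alphas.
clf = ElasticNetCV(alphas=50, eps=1e-3, l1_ratio=0.5, tol=1e-3).fit(X, y)
assert clf.l1_ratio == 0.5
assert len(clf.alphas_) == 50  # the fitted grid has the requested length
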
@@ -1086,7 +1039,7 @@ def test_warm_start_multitask_lasso():
         (Lasso, 1, dict(precompute=False)),
     ],
 )
-def test_enet_coordinate_descent(klass, n_classes, kwargs):
+def test_enet_coordinate_descent_raises_convergence(klass, n_classes, kwargs):
     """Test that a warning is issued if model does not converge"""
     clf = klass(
         alpha=1e-10,
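
Note: the renamed test checks that coordinate descent emits a `ConvergenceWarning` when it stops before reaching `tol`. A condensed sketch of the same idea (the data shape and `max_iter=1` here are assumptions for illustration, not the test's exact values):

import numpy as np
import pytest
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import ElasticNet

rng = np.random.RandomState(0)
X = rng.randn(100, 20)
y = X @ rng.randn(20)

# A near-zero alpha and a single iteration make convergence essentially
# impossible, so the fit should warn.
with pytest.warns(ConvergenceWarning):
    ElasticNet(alpha=1e-10, max_iter=1).fit(X, y)
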
@@ -1424,7 +1377,7 @@ def test_enet_cv_sample_weight_consistency(
 @pytest.mark.parametrize("X_is_sparse", [False, True])
 @pytest.mark.parametrize("fit_intercept", [False, True])
 @pytest.mark.parametrize("sample_weight", [np.array([10, 1, 10, 1]), None])
-def test_enet_alpha_max_sample_weight(X_is_sparse, fit_intercept, sample_weight):
+def test_enet_alpha_max(X_is_sparse, fit_intercept, sample_weight):
     X = np.array([[3.0, 1.0], [2.0, 5.0], [5.0, 3.0], [1.0, 4.0]])
     beta = np.array([1, 1])
     y = X @ beta
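
Note: `alpha_max` is the smallest penalty at which the Lasso solution is exactly zero; without intercept or sample weights it is `max|X^T y| / n_samples`. A sketch of the property the renamed test exercises, simplified here by assuming `fit_intercept=False`, `l1_ratio=1`, and no sample weights:

import numpy as np
from sklearn.linear_model import Lasso

X = np.array([[3.0, 1.0], [2.0, 5.0], [5.0, 3.0], [1.0, 4.0]])
y = X @ np.array([1.0, 1.0])
n_samples = X.shape[0]

# For the Lasso, w = 0 satisfies the optimality conditions exactly when
# alpha >= max|X^T y| / n_samples, so coefficients vanish at alpha_max.
alpha_max = np.max(np.abs(X.T @ y)) / n_samples

reg = Lasso(alpha=alpha_max, fit_intercept=False).fit(X, y)
assert np.all(reg.coef_ == 0.0)

# Just below alpha_max, at least one coefficient becomes nonzero.
reg = Lasso(alpha=0.99 * alpha_max, fit_intercept=False).fit(X, y)
assert np.any(reg.coef_ != 0.0)
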
@@ -1563,18 +1516,6 @@ def test_sample_weight_invariance(estimator):
     assert_allclose(reg_2sw.intercept_, reg_dup.intercept_)
 
 
-def test_read_only_buffer():
-    """Test that sparse coordinate descent works for read-only buffers"""
-
-    rng = np.random.RandomState(0)
-    clf = ElasticNet(alpha=0.1, copy_X=True, random_state=rng)
-    X = np.asfortranarray(rng.uniform(size=(100, 10)))
-    X.setflags(write=False)
-
-    y = rng.rand(100)
-    clf.fit(X, y)
-
-
 @pytest.mark.parametrize(
     "EstimatorCV",
     [ElasticNetCV, LassoCV, MultiTaskElasticNetCV, MultiTaskLassoCV],