Commit 42cbd9d

TST/MNT clean up some tests in coordinate descent (scikit-learn#31909)
1 parent 78301f5 commit 42cbd9d


sklearn/linear_model/tests/test_coordinate_descent.py

Lines changed: 5 additions & 64 deletions
@@ -17,16 +17,12 @@
     ElasticNetCV,
     Lasso,
     LassoCV,
-    LassoLars,
     LassoLarsCV,
     MultiTaskElasticNet,
     MultiTaskElasticNetCV,
     MultiTaskLasso,
     MultiTaskLassoCV,
     Ridge,
-    RidgeClassifier,
-    RidgeClassifierCV,
-    RidgeCV,
     enet_path,
     lars_path,
     lasso_path,
@@ -325,38 +321,6 @@ def test_lassocv_alphas_validation(alphas, err_type, err_msg):
         lassocv.fit(X, y)
 
 
-def _scale_alpha_inplace(estimator, n_samples):
-    """Rescale the parameter alpha from when the estimator is evoked with
-    normalize set to True as if it were evoked in a Pipeline with normalize set
-    to False and with a StandardScaler.
-    """
-    if ("alpha" not in estimator.get_params()) and (
-        "alphas" not in estimator.get_params()
-    ):
-        return
-
-    if isinstance(estimator, (RidgeCV, RidgeClassifierCV)):
-        # alphas is not validated at this point and can be a list.
-        # We convert it to a np.ndarray to make sure broadcasting
-        # is used.
-        alphas = np.asarray(estimator.alphas) * n_samples
-        return estimator.set_params(alphas=alphas)
-    if isinstance(estimator, (Lasso, LassoLars, MultiTaskLasso)):
-        alpha = estimator.alpha * np.sqrt(n_samples)
-    if isinstance(estimator, (Ridge, RidgeClassifier)):
-        alpha = estimator.alpha * n_samples
-    if isinstance(estimator, (ElasticNet, MultiTaskElasticNet)):
-        if estimator.l1_ratio == 1:
-            alpha = estimator.alpha * np.sqrt(n_samples)
-        elif estimator.l1_ratio == 0:
-            alpha = estimator.alpha * n_samples
-        else:
-            # To avoid silent errors in case of refactoring
-            raise NotImplementedError
-
-    estimator.set_params(alpha=alpha)
-
-
 def test_lasso_path_return_models_vs_new_return_gives_same_coefficients():
     # Test that lasso_path with lars_path style output gives the
     # same result
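
Note: the removed `_scale_alpha_inplace` helper encoded, per its own docstring, the alpha rescaling needed to replace the old `normalize=True` option with a `StandardScaler` pipeline: a pure L1 penalty is multiplied by sqrt(n_samples) and a pure L2 penalty by n_samples. A minimal standalone sketch of that rule (the `rescale_alpha` function and the example values are illustrative, not part of the test file):

import numpy as np
from sklearn.linear_model import ElasticNet, Lasso, Ridge


def rescale_alpha(estimator, n_samples):
    # Same rule the removed helper applied: sqrt(n_samples) for a pure L1
    # penalty, n_samples for a pure L2 penalty.
    if isinstance(estimator, Lasso) or (
        isinstance(estimator, ElasticNet) and estimator.l1_ratio == 1
    ):
        return estimator.alpha * np.sqrt(n_samples)
    if isinstance(estimator, Ridge) or (
        isinstance(estimator, ElasticNet) and estimator.l1_ratio == 0
    ):
        return estimator.alpha * n_samples
    # Mixed l1_ratio values were not covered by the removed helper either.
    raise NotImplementedError


print(rescale_alpha(Lasso(alpha=0.1), n_samples=100))  # -> 1.0 (alpha * sqrt(n_samples))
print(rescale_alpha(Ridge(alpha=0.1), n_samples=100))  # -> 10.0 (alpha * n_samples)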
@@ -395,7 +359,7 @@ def test_enet_path():
     clf = ElasticNetCV(
         alphas=[0.01, 0.05, 0.1], eps=2e-3, l1_ratio=[0.5, 0.7], cv=3, max_iter=max_iter
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
     # Well-conditioned settings, we should have selected our
     # smallest penalty
     assert_almost_equal(clf.alpha_, min(clf.alphas_))
@@ -411,7 +375,7 @@ def test_enet_path():
         max_iter=max_iter,
         precompute=True,
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
 
     # Well-conditioned settings, we should have selected our
     # smallest penalty
@@ -429,7 +393,7 @@ def test_enet_path():
     clf = MultiTaskElasticNetCV(
         alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7], cv=3, max_iter=max_iter
     )
-    ignore_warnings(clf.fit)(X, y)
+    clf.fit(X, y)
     # We are in well-conditioned settings with low noise: we should
     # have a good test-set performance
     assert clf.score(X_test, y_test) > 0.99
@@ -446,17 +410,6 @@ def test_enet_path():
     assert_almost_equal(clf1.alpha_, clf2.alpha_)
 
 
-def test_path_parameters():
-    X, y, _, _ = build_dataset()
-    max_iter = 100
-
-    clf = ElasticNetCV(alphas=50, eps=1e-3, max_iter=max_iter, l1_ratio=0.5, tol=1e-3)
-    clf.fit(X, y)  # new params
-    assert_almost_equal(0.5, clf.l1_ratio)
-    assert 50 == clf._alphas
-    assert 50 == len(clf.alphas_)
-
-
 def test_warm_start():
     X, y, _, _ = build_dataset()
     clf = ElasticNet(alpha=0.1, max_iter=5, warm_start=True)
@@ -1086,7 +1039,7 @@ def test_warm_start_multitask_lasso():
         (Lasso, 1, dict(precompute=False)),
     ],
 )
-def test_enet_coordinate_descent(klass, n_classes, kwargs):
+def test_enet_coordinate_descent_raises_convergence(klass, n_classes, kwargs):
     """Test that a warning is issued if model does not converge"""
     clf = klass(
         alpha=1e-10,
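
Note: the renamed test only asserts that coordinate descent warns when it fails to converge. A minimal standalone sketch of that pattern, using a deliberately under-iterated fit on random data (the data and parameters below are illustrative, not the test's actual setup):

import numpy as np
import pytest
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import Lasso

rng = np.random.RandomState(0)
X = rng.normal(size=(30, 50))
y = rng.normal(size=30)

# A tiny penalty and a single iteration make convergence essentially
# impossible, so the solver should emit a ConvergenceWarning.
with pytest.warns(ConvergenceWarning):
    Lasso(alpha=1e-10, max_iter=1).fit(X, y)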
@@ -1424,7 +1377,7 @@ def test_enet_cv_sample_weight_consistency(
 @pytest.mark.parametrize("X_is_sparse", [False, True])
 @pytest.mark.parametrize("fit_intercept", [False, True])
 @pytest.mark.parametrize("sample_weight", [np.array([10, 1, 10, 1]), None])
-def test_enet_alpha_max_sample_weight(X_is_sparse, fit_intercept, sample_weight):
+def test_enet_alpha_max(X_is_sparse, fit_intercept, sample_weight):
     X = np.array([[3.0, 1.0], [2.0, 5.0], [5.0, 3.0], [1.0, 4.0]])
     beta = np.array([1, 1])
     y = X @ beta
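
Note: the renamed test exercises the alpha_max threshold, i.e. the smallest penalty at which an L1 model shrinks every coefficient to exactly zero; for a Lasso fit without an intercept this is max|X.T @ y| / n_samples (with y and X centered first when an intercept is fitted). A quick standalone check of that threshold on the same toy data, ignoring sample weights and sparse input (a sketch, not the test itself):

import numpy as np
from sklearn.linear_model import Lasso

X = np.array([[3.0, 1.0], [2.0, 5.0], [5.0, 3.0], [1.0, 4.0]])
y = X @ np.array([1, 1])

# Smallest penalty that zeroes out every coefficient (no-intercept case).
alpha_max = np.max(np.abs(X.T @ y)) / X.shape[0]

reg = Lasso(alpha=alpha_max, fit_intercept=False).fit(X, y)
print(reg.coef_)  # expected: all coefficients exactly zero

reg = Lasso(alpha=0.99 * alpha_max, fit_intercept=False).fit(X, y)
print(reg.coef_)  # at least one coefficient should now be non-zero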
@@ -1563,18 +1516,6 @@ def test_sample_weight_invariance(estimator):
     assert_allclose(reg_2sw.intercept_, reg_dup.intercept_)
 
 
-def test_read_only_buffer():
-    """Test that sparse coordinate descent works for read-only buffers"""
-
-    rng = np.random.RandomState(0)
-    clf = ElasticNet(alpha=0.1, copy_X=True, random_state=rng)
-    X = np.asfortranarray(rng.uniform(size=(100, 10)))
-    X.setflags(write=False)
-
-    y = rng.rand(100)
-    clf.fit(X, y)
-
-
 @pytest.mark.parametrize(
     "EstimatorCV",
     [ElasticNetCV, LassoCV, MultiTaskElasticNetCV, MultiTaskLassoCV],
