diff --git a/sklearn/cross_decomposition/tests/test_pls.py b/sklearn/cross_decomposition/tests/test_pls.py
index aff2b76034b0b..55ef8b482d58e 100644
--- a/sklearn/cross_decomposition/tests/test_pls.py
+++ b/sklearn/cross_decomposition/tests/test_pls.py
@@ -484,31 +484,35 @@ def test_n_components_upper_bounds(Estimator):
 
 
 @pytest.mark.parametrize("n_samples, n_features", [(100, 10), (100, 200)])
-@pytest.mark.parametrize("seed", range(10))
-def test_singular_value_helpers(n_samples, n_features, seed):
+def test_singular_value_helpers(n_samples, n_features, global_random_seed):
     # Make sure SVD and power method give approximately the same results
-    X, Y = make_regression(n_samples, n_features, n_targets=5, random_state=seed)
+    X, Y = make_regression(
+        n_samples, n_features, n_targets=5, random_state=global_random_seed
+    )
 
     u1, v1, _ = _get_first_singular_vectors_power_method(X, Y, norm_y_weights=True)
     u2, v2 = _get_first_singular_vectors_svd(X, Y)
 
     _svd_flip_1d(u1, v1)
     _svd_flip_1d(u2, v2)
 
-    rtol = 1e-1
-    assert_allclose(u1, u2, rtol=rtol)
-    assert_allclose(v1, v2, rtol=rtol)
+    rtol = 1e-3
+    # Setting atol because some coordinates are very close to zero
+    assert_allclose(u1, u2, atol=u2.max() * rtol)
+    assert_allclose(v1, v2, atol=v2.max() * rtol)
 
 
-def test_one_component_equivalence():
+def test_one_component_equivalence(global_random_seed):
     # PLSSVD, PLSRegression and PLSCanonical should all be equivalent when
     # n_components is 1
-    X, Y = make_regression(100, 10, n_targets=5, random_state=0)
+    X, Y = make_regression(100, 10, n_targets=5, random_state=global_random_seed)
     svd = PLSSVD(n_components=1).fit(X, Y).transform(X)
     reg = PLSRegression(n_components=1).fit(X, Y).transform(X)
     canonical = PLSCanonical(n_components=1).fit(X, Y).transform(X)
 
-    assert_allclose(svd, reg, rtol=1e-2)
-    assert_allclose(svd, canonical, rtol=1e-2)
+    rtol = 1e-3
+    # Setting atol because some entries are very close to zero
+    assert_allclose(svd, reg, atol=reg.max() * rtol)
+    assert_allclose(svd, canonical, atol=canonical.max() * rtol)
 
 
 def test_svd_flip_1d():
@@ -526,9 +530,11 @@ def test_svd_flip_1d():
     assert_allclose(v, [-1, -2, -3])
 
 
-def test_loadings_converges():
+def test_loadings_converges(global_random_seed):
     """Test that CCA converges. Non-regression test for #19549."""
-    X, y = make_regression(n_samples=200, n_features=20, n_targets=20, random_state=20)
+    X, y = make_regression(
+        n_samples=200, n_features=20, n_targets=20, random_state=global_random_seed
+    )
     cca = CCA(n_components=10, max_iter=500)