@@ -195,7 +195,8 @@ def test_predict_iris():
         assert np.mean(pred == target) > .95


-@pytest.mark.parametrize('solver', ['lbfgs', 'newton-cg', 'sag', 'saga'])
+@pytest.mark.parametrize('solver', ['lbfgs', 'newton-cg', 'sag', 'saga',
+                                    'trust-ncg', 'trust-krylov'])
 def test_multinomial_validation(solver):
     lr = LogisticRegression(C=-1, solver=solver, multi_class='multinomial')
     assert_raises(ValueError, lr.fit, [[0, 1], [1, 0]], [0, 1])
@@ -247,7 +248,8 @@ def test_check_solver_option(LR):
         assert_raise_message(ValueError, msg, lr.fit, X, y)


-@pytest.mark.parametrize('solver', ['lbfgs', 'newton-cg', 'sag', 'saga'])
+@pytest.mark.parametrize('solver', ['lbfgs', 'newton-cg', 'sag', 'saga',
+                                    'trust-ncg', 'trust-krylov'])
 def test_multinomial_binary(solver):
     # Test multinomial LR on a binary problem.
     target = (iris.target > 0).astype(np.intp)
@@ -1233,7 +1235,8 @@ def test_n_iter(solver):
     assert clf.n_iter_.shape == (1, n_cv_fold, n_Cs)


-@pytest.mark.parametrize('solver', ('newton-cg', 'sag', 'saga', 'lbfgs'))
+@pytest.mark.parametrize('solver', ('newton-cg', 'sag', 'saga', 'lbfgs',
+                                    'trust-ncg', 'trust-krylov'))
 @pytest.mark.parametrize('warm_start', (True, False))
 @pytest.mark.parametrize('fit_intercept', (True, False))
 @pytest.mark.parametrize('multi_class', ['ovr', 'multinomial'])
@@ -1694,7 +1697,7 @@ def test_logistic_regression_path_coefs_multinomial():
                                                       Cs=3, tol=1e-3)],
                          ids=lambda x: x.__class__.__name__)
 @pytest.mark.parametrize('solver', ['liblinear', 'lbfgs', 'newton-cg', 'sag',
-                                    'saga'])
+                                    'saga', 'trust-ncg', 'trust-krylov'])
 def test_logistic_regression_multi_class_auto(est, solver):
     # check multi_class='auto' => multi_class='ovr' iff binary y or liblinear

@@ -1737,7 +1740,8 @@ def fit(X, y, **kw):
                             solver=solver).coef_)


-@pytest.mark.parametrize('solver', ('lbfgs', 'newton-cg', 'sag', 'saga'))
+@pytest.mark.parametrize('solver', ('lbfgs', 'newton-cg', 'sag', 'saga',
+                                    'trust-ncg', 'trust-krylov'))
 def test_penalty_none(solver):
     # - Make sure warning is raised if penalty='none' and C is set to a
     #   non-default value.
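
Note: the hunks above only extend the solver parametrizations so that the proposed 'trust-ncg' and 'trust-krylov' options run through the existing multinomial, warm-start, multi_class='auto', and penalty='none' tests. As a hedged sketch of what these tests assume, the snippet below fits LogisticRegression with the new solver names and checks the same > .95 training accuracy used in test_predict_iris; it only works on a branch where these solvers have been added, since released scikit-learn rejects them with a ValueError.

# Sketch only: 'trust-ncg' and 'trust-krylov' are assumed to be accepted by
# this branch's LogisticRegression; they are not valid solver names in
# released scikit-learn.
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

iris = load_iris()
for solver in ('trust-ncg', 'trust-krylov'):
    clf = LogisticRegression(C=len(iris.data), solver=solver,
                             multi_class='multinomial')
    clf.fit(iris.data, iris.target)
    # mirrors the > .95 accuracy bar used in test_predict_iris
    assert np.mean(clf.predict(iris.data) == iris.target) > .95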