@@ -1709,17 +1709,23 @@ def test_ocsvm_vs_sgdocsvm():
 
 
 def test_sgd_oneclass_convergence():
-    # Check that the optimization does not end early, that the stopping criterion is working
+    # Check that the optimization does not end early,
+    # that the stopping criterion is working
     for nu in [0.1, 0.5, 0.9]:
-        model = SGDOneClassSVM(nu=nu, max_iter=100, tol=1e-3, learning_rate="constant", eta0=1e-3)  # no need for large max_iter
+        # no need for large max_iter
+        model = SGDOneClassSVM(nu=nu, max_iter=100, tol=1e-3,
+                               learning_rate="constant", eta0=1e-3)
         model.fit(iris.data)
-        assert model.n_iter_ > 6  # 6 is the minimal number of iterations, after which optimization can stop
+        # 6 is the minimal number of iterations, after which optimization can stop
+        assert model.n_iter_ > 6
 
 
 def test_sgd_oneclass_vs_linear_oneclass():
     # Test convergence vs. liblinear OCSVM with kernel="linear"
     for nu in [0.1, 0.5, 0.9]:
-        model = SGDOneClassSVM(nu=nu, max_iter=20000, tol=None, learning_rate="constant", eta0=1e-3)  # allow enough iterations, small dataset
+        # allow enough iterations, small dataset
+        model = SGDOneClassSVM(nu=nu, max_iter=20000, tol=None,
+                               learning_rate="constant", eta0=1e-3)
         model_ref = OneClassSVM(kernel="linear", nu=nu, tol=1e-6)  # reference model
         model.fit(iris.data)
         model_ref.fit(iris.data)
@@ -1733,7 +1739,10 @@ def test_sgd_oneclass_vs_linear_oneclass():
         dec_fn_corr = np.corrcoef(dec_fn, dec_fn_ref)[0, 1]
         preds_corr = np.corrcoef(preds, preds_ref)[0, 1]
         # check weights and intercept concatenated together for correlation
-        coef_corr = np.corrcoef(np.concatenate([model.coef_, -model.offset_]), np.concatenate([model_ref.coef_.flatten(), model_ref.intercept_]))[0, 1]
+        coef_corr = np.corrcoef(
+            np.concatenate([model.coef_, -model.offset_]),
+            np.concatenate([model_ref.coef_.flatten(), model_ref.intercept_])
+        )[0, 1]
         # share of predicted 1's
         share_ones = (preds == 1).sum() / len(preds)
         share_ones_ref = (preds_ref == 1).sum() / len(preds_ref)