14
14
"""
15
15
16
16
# Echo the example's module-level docstring to stdout when the script runs.
print (__doc__ )
17
+
18
+ import warnings
19
+
17
20
import matplotlib .pyplot as plt
21
+
18
22
from sklearn .neural_network import MLPClassifier
19
23
from sklearn .preprocessing import MinMaxScaler
20
24
from sklearn import datasets
25
+ from sklearn .exceptions import ConvergenceWarning
21
26
22
27
# different learning rate schedules and momentum parameters
23
28
params = [{'solver' : 'sgd' , 'learning_rate' : 'constant' , 'momentum' : 0 ,
@@ -52,6 +57,7 @@ def plot_on_dataset(X, y, ax, name):
52
57
# for each dataset, plot learning for each learning strategy
53
58
print ("\n learning on dataset %s" % name )
54
59
ax .set_title (name )
60
+
55
61
X = MinMaxScaler ().fit_transform (X )
56
62
mlps = []
57
63
if name == "digits" :
@@ -64,12 +70,19 @@ def plot_on_dataset(X, y, ax, name):
64
70
print ("training: %s" % label )
65
71
mlp = MLPClassifier (verbose = 0 , random_state = 0 ,
66
72
max_iter = max_iter , ** param )
67
- mlp .fit (X , y )
73
+
74
+ # some parameter combinations will not converge as can be seen on the
75
+ # plots so they are ignored here
76
+ with warnings .catch_warnings ():
77
+ warnings .filterwarnings ("ignore" , category = ConvergenceWarning ,
78
+ module = "sklearn" )
79
+ mlp .fit (X , y )
80
+
68
81
mlps .append (mlp )
69
82
print ("Training set score: %f" % mlp .score (X , y ))
70
83
print ("Training set loss: %f" % mlp .loss_ )
71
84
for mlp , label , args in zip (mlps , labels , plot_args ):
72
- ax .plot (mlp .loss_curve_ , label = label , ** args )
85
+ ax .plot (mlp .loss_curve_ , label = label , ** args )
73
86
74
87
75
88
# Lay out a 2x2 grid of axes on one 15x10-inch figure; presumably one axis
# per dataset plotted by plot_on_dataset — confirm against the loop below.
fig , axes = plt .subplots (2 , 2 , figsize = (15 , 10 ))
0 commit comments