1 parent 02a0101 commit 2df5945
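Every hunk below makes the same fix: string literals that embed LaTeX markup (`\gamma`, `\exp`, `\lambda`, ...) get a raw-string `r` prefix, because since Python 3.6 an unrecognized escape sequence in a plain literal emits a DeprecationWarning at compile time and is slated to become an error. A minimal standalone sketch of the warning and the fix (not part of the patch):

```python
import warnings

# The same assignment twice, handed to compile() as source text so the
# compile-time warning surfaces while this script runs.
plain = 'xlabel = "$\\gamma$"'   # plain literal: contains the escape \g
raw = 'xlabel = r"$\\gamma$"'    # raw literal: backslashes kept verbatim

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(plain, "<plain>", "exec")
    compile(raw, "<raw>", "exec")

for w in caught:
    print(w.category.__name__, "-", w.message)
# Only the plain literal warns: "invalid escape sequence \g"
# (DeprecationWarning on Python 3.6-3.11, SyntaxWarning on 3.12+).
```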
examples/applications/svm_gui.py
@@ -166,8 +166,8 @@ def __init__(self, root, controller):
 
     def plot_kernels(self):
         self.ax.text(-50, -60, "Linear: $u^T v$")
-        self.ax.text(-20, -60, "RBF: $\exp (-\gamma \| u-v \|^2)$")
-        self.ax.text(10, -60, "Poly: $(\gamma \, u^T v + r)^d$")
+        self.ax.text(-20, -60, r"RBF: $\exp (-\gamma \| u-v \|^2)$")
+        self.ax.text(10, -60, r"Poly: $(\gamma \, u^T v + r)^d$")
 
     def onclick(self, event):
         if event.xdata and event.ydata:
examples/decomposition/plot_kernel_pca.py
@@ -64,7 +64,7 @@
 plt.scatter(X_kpca[blues, 0], X_kpca[blues, 1], c="blue",
             s=20, edgecolor='k')
 plt.title("Projection by KPCA")
-plt.xlabel("1st principal component in space induced by $\phi$")
+plt.xlabel(r"1st principal component in space induced by $\phi$")
 plt.ylabel("2nd component")
 
 plt.subplot(2, 2, 4, aspect='equal')
examples/ensemble/plot_adaboost_multiclass.py
@@ -1,4 +1,4 @@
-"""
+r"""
 =====================================
 Multi-class AdaBoosted Decision Trees
 =====================================
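Where the LaTeX lives in a module docstring rather than a plot label, as in this file, the `r` prefix goes on the docstring itself. A minimal sketch of the pattern, using a hypothetical module rather than the real example:

```python
r"""A hypothetical example module.

The r prefix keeps backslashes in narrative math such as
:math:`\exp(-\gamma \| u - v \|^2)` intact for Sphinx, and silences
the invalid-escape-sequence warning for \e, \g and friends.
"""

print(__doc__)  # backslashes survive verbatim
```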
examples/ensemble/plot_bias_variance.py
@@ -161,12 +161,12 @@ def generate(n_samples, noise, n_repeat=1):
 
     for i in range(n_repeat):
         if i == 0:
-            plt.plot(X_test, y_predict[:, i], "r", label="$\^y(x)$")
+            plt.plot(X_test, y_predict[:, i], "r", label=r"$\^y(x)$")
         else:
             plt.plot(X_test, y_predict[:, i], "r", alpha=0.05)
 
     plt.plot(X_test, np.mean(y_predict, axis=1), "c",
-             label="$\mathbb{E}_{LS} \^y(x)$")
+             label=r"$\mathbb{E}_{LS} \^y(x)$")
 
     plt.xlim([-5, 5])
     plt.title(name)
examples/ensemble/plot_gradient_boosting_quantile.py
@@ -64,7 +64,7 @@ def f(x):
 # Plot the function, the prediction and the 90% confidence interval based on
 # the MSE
 fig = plt.figure()
-plt.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(xx, f(xx), 'g:', label=r'$f(x) = x\,\sin(x)$')
 plt.plot(X, y, 'b.', markersize=10, label=u'Observations')
 plt.plot(xx, y_pred, 'r-', label=u'Prediction')
 plt.plot(xx, y_upper, 'k-')
examples/gaussian_process/plot_gpc_isoprobability.py
@@ -78,7 +78,7 @@ def g(x):
                  extent=(-lim, lim, -lim, lim))
 norm = plt.matplotlib.colors.Normalize(vmin=0., vmax=0.9)
 cb = plt.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)
-cb.set_label('${\\rm \mathbb{P}}\left[\widehat{G}(\mathbf{x}) \leq 0\\right]$')
+cb.set_label(r'${\rm \mathbb{P}}\left[\widehat{G}(\mathbf{x}) \leq 0\right]$')
 plt.clim(0, 1)
 
 plt.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)
examples/gaussian_process/plot_gpr_noisy_targets.py
@@ -62,7 +62,7 @@ def f(x):
 # Plot the function, the prediction and the 95% confidence interval based on
 # the MSE
 plt.figure()
-plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
 plt.plot(X, y, 'r.', markersize=10, label=u'Observations')
 plt.plot(x, y_pred, 'b-', label=u'Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),
@@ -98,7 +98,7 @@ def f(x):
 # Plot the function, the prediction and the 95% confidence interval based on
 # the MSE
 plt.figure()
-plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
 plt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
 plt.plot(x, y_pred, 'b-', label=u'Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),
examples/model_selection/plot_learning_curve.py
@@ -119,7 +119,7 @@ def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
 estimator = GaussianNB()
 plot_learning_curve(estimator, title, X, y, ylim=(0.7, 1.01), cv=cv, n_jobs=4)
 
-title = "Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
+title = r"Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
 # SVC is more expensive so we do a lower number of CV iterations:
 cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)
 estimator = SVC(gamma=0.001)
examples/model_selection/plot_validation_curve.py
@@ -33,7 +33,7 @@
 test_scores_std = np.std(test_scores, axis=1)
 
 plt.title("Validation Curve with SVM")
-plt.xlabel("$\gamma$")
+plt.xlabel(r"$\gamma$")
 plt.ylabel("Score")
 plt.ylim(0.0, 1.1)
 lw = 2
examples/preprocessing/plot_map_data_to_normal.py
@@ -127,7 +127,7 @@
     ax.hist(X_trans, color=color, bins=BINS)
     title = 'After {}'.format(meth_name)
     if lmbda is not None:
-        title += '\n$\lambda$ = {}'.format(lmbda)
+        title += r'\n$\lambda$ = {}'.format(lmbda)
     ax.set_title(title, fontsize=FONT_SIZE)
     ax.tick_params(axis='both', which='major', labelsize=FONT_SIZE)
     ax.set_xlim([-3.5, 3.5])
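One caveat specific to this hunk: under the raw prefix, `\n` is no longer a newline but the two characters backslash and `n`, so the title loses its real line break and matplotlib will typically draw those characters literally. A quick standalone check of what the prefix changes (the lambda value is illustrative):

```python
# Standalone check, illustrative value: the raw prefix also changes '\n'.
plain = '\n$\\lambda$ = {}'.format(0.5)   # begins with a real newline
raw = r'\n$\lambda$ = {}'.format(0.5)     # begins with '\' and 'n'

print(repr(plain))           # '\n$\\lambda$ = 0.5'
print(repr(raw))             # '\\n$\\lambda$ = 0.5'
print(len(plain), len(raw))  # 16 17 -- the raw string is one char longer
```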
examples/svm/plot_svm_scale_c.py
@@ -1,4 +1,4 @@
-"""
+r"""
 ==============================================
 Scaling the regularization parameter for SVCs
 ==============================================