Commit 2df5945

BoboTiG authored and qinhanmin2014 committed
EXA Fix several DeprecationWarning: invalid escape sequence in examples (#12924)
1 parent 02a0101 commit 2df5945

11 files changed (+14 -14 lines)
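
Every hunk below applies the same small fix: a string literal that carries LaTeX/mathtext markup for matplotlib gains an r prefix, so backslash sequences such as \gamma or \exp stop being parsed as (invalid) Python string escapes. A minimal sketch of the effect, using a made-up label rather than a line from the diff:

import warnings

# The old form: "\g" is not a recognised escape, so compiling the literal warns
# (DeprecationWarning on Python 3.6-3.11; newer interpreters emit SyntaxWarning).
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r'plt.xlabel("$\gamma$")', "<before>", "exec")
print([str(w.message) for w in caught])  # e.g. ["invalid escape sequence \\g"]

# The committed form: the r"" prefix keeps the backslash verbatim for mathtext
# and compiles without any warning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r'plt.xlabel(r"$\gamma$")', "<after>", "exec")
print(len(caught))  # 0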

examples/applications/svm_gui.py (+2 -2)

@@ -166,8 +166,8 @@ def __init__(self, root, controller):
 
     def plot_kernels(self):
        self.ax.text(-50, -60, "Linear: $u^T v$")
-       self.ax.text(-20, -60, "RBF: $\exp (-\gamma \| u-v \|^2)$")
-       self.ax.text(10, -60, "Poly: $(\gamma \, u^T v + r)^d$")
+       self.ax.text(-20, -60, r"RBF: $\exp (-\gamma \| u-v \|^2)$")
+       self.ax.text(10, -60, r"Poly: $(\gamma \, u^T v + r)^d$")
 
     def onclick(self, event):
        if event.xdata and event.ydata:

examples/decomposition/plot_kernel_pca.py (+1 -1)

@@ -64,7 +64,7 @@
 plt.scatter(X_kpca[blues, 0], X_kpca[blues, 1], c="blue",
             s=20, edgecolor='k')
 plt.title("Projection by KPCA")
-plt.xlabel("1st principal component in space induced by $\phi$")
+plt.xlabel(r"1st principal component in space induced by $\phi$")
 plt.ylabel("2nd component")
 
 plt.subplot(2, 2, 4, aspect='equal')

examples/ensemble/plot_adaboost_multiclass.py (+1 -1)

@@ -1,4 +1,4 @@
-"""
+r"""
 =====================================
 Multi-class AdaBoosted Decision Trees
 =====================================

examples/ensemble/plot_bias_variance.py (+2 -2)

@@ -161,12 +161,12 @@ def generate(n_samples, noise, n_repeat=1):
 
     for i in range(n_repeat):
         if i == 0:
-            plt.plot(X_test, y_predict[:, i], "r", label="$\^y(x)$")
+            plt.plot(X_test, y_predict[:, i], "r", label=r"$\^y(x)$")
         else:
             plt.plot(X_test, y_predict[:, i], "r", alpha=0.05)
 
     plt.plot(X_test, np.mean(y_predict, axis=1), "c",
-             label="$\mathbb{E}_{LS} \^y(x)$")
+             label=r"$\mathbb{E}_{LS} \^y(x)$")
 
     plt.xlim([-5, 5])
     plt.title(name)

examples/ensemble/plot_gradient_boosting_quantile.py (+1 -1)

@@ -64,7 +64,7 @@ def f(x):
 # Plot the function, the prediction and the 90% confidence interval based on
 # the MSE
 fig = plt.figure()
-plt.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(xx, f(xx), 'g:', label=r'$f(x) = x\,\sin(x)$')
 plt.plot(X, y, 'b.', markersize=10, label=u'Observations')
 plt.plot(xx, y_pred, 'r-', label=u'Prediction')
 plt.plot(xx, y_upper, 'k-')

examples/gaussian_process/plot_gpc_isoprobability.py (+1 -1)

@@ -78,7 +78,7 @@ def g(x):
                  extent=(-lim, lim, -lim, lim))
 norm = plt.matplotlib.colors.Normalize(vmin=0., vmax=0.9)
 cb = plt.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)
-cb.set_label('${\\rm \mathbb{P}}\left[\widehat{G}(\mathbf{x}) \leq 0\\right]$')
+cb.set_label(r'${\rm \mathbb{P}}\left[\widehat{G}(\mathbf{x}) \leq 0\right]$')
 plt.clim(0, 1)
 
 plt.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)

examples/gaussian_process/plot_gpr_noisy_targets.py (+2 -2)

@@ -62,7 +62,7 @@ def f(x):
 # Plot the function, the prediction and the 95% confidence interval based on
 # the MSE
 plt.figure()
-plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
 plt.plot(X, y, 'r.', markersize=10, label=u'Observations')
 plt.plot(x, y_pred, 'b-', label=u'Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),
@@ -98,7 +98,7 @@ def f(x):
 # Plot the function, the prediction and the 95% confidence interval based on
 # the MSE
 plt.figure()
-plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
 plt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
 plt.plot(x, y_pred, 'b-', label=u'Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),

examples/model_selection/plot_learning_curve.py (+1 -1)

@@ -119,7 +119,7 @@ def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
 estimator = GaussianNB()
 plot_learning_curve(estimator, title, X, y, ylim=(0.7, 1.01), cv=cv, n_jobs=4)
 
-title = "Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
+title = r"Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
 # SVC is more expensive so we do a lower number of CV iterations:
 cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)
 estimator = SVC(gamma=0.001)

examples/model_selection/plot_validation_curve.py (+1 -1)

@@ -33,7 +33,7 @@
 test_scores_std = np.std(test_scores, axis=1)
 
 plt.title("Validation Curve with SVM")
-plt.xlabel("$\gamma$")
+plt.xlabel(r"$\gamma$")
 plt.ylabel("Score")
 plt.ylim(0.0, 1.1)
 lw = 2

examples/preprocessing/plot_map_data_to_normal.py (+1 -1)

@@ -127,7 +127,7 @@
     ax.hist(X_trans, color=color, bins=BINS)
     title = 'After {}'.format(meth_name)
     if lmbda is not None:
-        title += '\n$\lambda$ = {}'.format(lmbda)
+        title += r'\n$\lambda$ = {}'.format(lmbda)
     ax.set_title(title, fontsize=FONT_SIZE)
     ax.tick_params(axis='both', which='major', labelsize=FONT_SIZE)
     ax.set_xlim([-3.5, 3.5])

examples/svm/plot_svm_scale_c.py (+1 -1)

@@ -1,4 +1,4 @@
-"""
+r"""
 ==============================================
 Scaling the regularization parameter for SVCs
 ==============================================
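
One hedged way to verify the change locally (the chosen path and the check itself are assumptions, not part of the commit) is to promote the warning to an error and re-compile one of the touched files; before this commit the compile step would raise:

import warnings

warnings.simplefilter("error", DeprecationWarning)  # category on Python 3.6-3.11
warnings.simplefilter("error", SyntaxWarning)       # category on newer interpreters

# Path taken from the diff above; adjust it to your scikit-learn checkout.
path = "examples/model_selection/plot_validation_curve.py"
with open(path) as fh:
    compile(fh.read(), path, "exec")
print("no invalid escape sequences left in", path)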
