Commit acddc72

COSMIT don't use docstrings in tests (gp kernel tests)
1 parent c726085 commit acddc72
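
The rationale behind this change (COSMIT is the project's tag for cosmetic commits) follows scikit-learn's testing convention: test runners pick up a test's docstring as its description, so nose shows the docstring's first line in place of the test function's name, which makes a failing test harder to map back to its source. A minimal sketch of the effect, using a hypothetical test file (not part of this commit):

    import unittest


    class Demo(unittest.TestCase):

        def test_with_docstring(self):
            """ Check something important. """
            # nose reports this test as "Check something important."
            # instead of "test_with_docstring".
            self.assertTrue(False)

        def test_with_comment(self):
            # Check something important.
            # Reported by its real name, "test_with_comment".
            self.assertTrue(False)


    if __name__ == "__main__":
        unittest.main(verbosity=2)

Converting the docstrings to comments, as this commit does, keeps the explanation in the code while leaving the reported test names intact.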

3 files changed (+52 -57 lines)

sklearn/gaussian_process/tests/test_gpc.py (+10 -13)
@@ -34,16 +34,15 @@ def f(x):
 
 
 def test_predict_consistent():
-    """ Check binary predict decision has also predicted probability above 0.5.
-    """
+    # Check binary predict decision has also predicted probability above 0.5.
     for kernel in kernels:
         gpc = GaussianProcessClassifier(kernel=kernel).fit(X, y)
         assert_array_equal(gpc.predict(X),
                            gpc.predict_proba(X)[:, 1] >= 0.5)
 
 
 def test_lml_improving():
-    """ Test that hyperparameter-tuning improves log-marginal likelihood. """
+    # Test that hyperparameter-tuning improves log-marginal likelihood.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -53,15 +52,15 @@ def test_lml_improving():
 
 
 def test_lml_precomputed():
-    """ Test that lml of optimized kernel is stored correctly. """
+    # Test that lml of optimized kernel is stored correctly.
     for kernel in kernels:
         gpc = GaussianProcessClassifier(kernel=kernel).fit(X, y)
         assert_almost_equal(gpc.log_marginal_likelihood(gpc.kernel_.theta),
                             gpc.log_marginal_likelihood(), 7)
 
 
 def test_converged_to_local_maximum():
-    """ Test that we are in local maximum after hyperparameter-optimization."""
+    # Test that we are in local maximum after hyperparameter-optimization.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -76,7 +75,7 @@ def test_converged_to_local_maximum():
 
 
 def test_lml_gradient():
-    """ Compare analytic and numeric gradient of log marginal likelihood. """
+    # Compare analytic and numeric gradient of log marginal likelihood.
     for kernel in kernels:
         gpc = GaussianProcessClassifier(kernel=kernel).fit(X, y)
 
@@ -91,10 +90,8 @@ def test_lml_gradient():
 
 
 def test_random_starts():
-    """
-    Test that an increasing number of random-starts of GP fitting only
-    increases the log marginal likelihood of the chosen theta.
-    """
+    # Test that an increasing number of random-starts of GP fitting only
+    # increases the log marginal likelihood of the chosen theta.
     n_samples, n_features = 25, 2
     np.random.seed(0)
     rng = np.random.RandomState(0)
@@ -115,7 +112,7 @@ def test_random_starts():
 
 
 def test_custom_optimizer():
-    """ Test that GPC can use externally defined optimizers. """
+    # Test that GPC can use externally defined optimizers.
     # Define a dummy optimizer that simply tests 50 random hyperparameters
     def optimizer(obj_func, initial_theta, bounds):
         rng = np.random.RandomState(0)
@@ -140,7 +137,7 @@ def optimizer(obj_func, initial_theta, bounds):
 
 
 def test_multi_class():
-    """ Test GPC for multi-class classification problems. """
+    # Test GPC for multi-class classification problems.
     for kernel in kernels:
         gpc = GaussianProcessClassifier(kernel=kernel)
         gpc.fit(X, y_mc)
@@ -153,7 +150,7 @@ def test_multi_class():
 
 
 def test_multi_class_n_jobs():
-    """ Test that multi-class GPC produces identical results with n_jobs>1. """
+    # Test that multi-class GPC produces identical results with n_jobs>1.
     for kernel in kernels:
         gpc = GaussianProcessClassifier(kernel=kernel)
         gpc.fit(X, y_mc)
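
For context on what test_lml_gradient verifies: the analytic gradient returned by log_marginal_likelihood(theta, eval_gradient=True) is checked against a numeric approximation. A minimal sketch of that pattern using a central finite difference; the toy data, step size, and tolerance here are illustrative assumptions, not the test's actual values:

    import numpy as np
    from sklearn.gaussian_process import GaussianProcessClassifier
    from sklearn.gaussian_process.kernels import RBF

    # Illustrative toy data (not the fixture the real tests use).
    rng = np.random.RandomState(0)
    X = rng.uniform(0, 5, (30, 1))
    y = (np.sin(X[:, 0]) > 0).astype(int)

    gpc = GaussianProcessClassifier(kernel=RBF(1.0)).fit(X, y)
    theta = gpc.kernel_.theta

    # Analytic gradient from the model.
    lml, lml_gradient = gpc.log_marginal_likelihood(theta, eval_gradient=True)

    # Central finite-difference approximation, one coordinate at a time.
    eps = 1e-6
    numeric_gradient = np.array(
        [(gpc.log_marginal_likelihood(theta + eps * unit)
          - gpc.log_marginal_likelihood(theta - eps * unit)) / (2 * eps)
         for unit in np.eye(len(theta))])

    assert np.allclose(lml_gradient, numeric_gradient, atol=1e-4)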

sklearn/gaussian_process/tests/test_gpr.py (+19 -22)
@@ -36,7 +36,7 @@ def f(x):
 
 
 def test_gpr_interpolation():
-    """Test the interpolating property for different kernels."""
+    # Test the interpolating property for different kernels.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         y_pred, y_cov = gpr.predict(X, return_cov=True)
@@ -46,7 +46,7 @@ def test_gpr_interpolation():
 
 
 def test_lml_improving():
-    """ Test that hyperparameter-tuning improves log-marginal likelihood. """
+    # Test that hyperparameter-tuning improves log-marginal likelihood.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -56,15 +56,15 @@ def test_lml_improving():
 
 
 def test_lml_precomputed():
-    """ Test that lml of optimized kernel is stored correctly. """
+    # Test that lml of optimized kernel is stored correctly.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         assert_equal(gpr.log_marginal_likelihood(gpr.kernel_.theta),
                      gpr.log_marginal_likelihood())
 
 
 def test_converged_to_local_maximum():
-    """ Test that we are in local maximum after hyperparameter-optimization."""
+    # Test that we are in local maximum after hyperparameter-optimization.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -79,7 +79,7 @@ def test_converged_to_local_maximum():
 
 
 def test_solution_inside_bounds():
-    """ Test that hyperparameter-optimization remains in bounds"""
+    # Test that hyperparameter-optimization remains in bounds
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -95,7 +95,7 @@ def test_solution_inside_bounds():
 
 
 def test_lml_gradient():
-    """ Compare analytic and numeric gradient of log marginal likelihood. """
+    # Compare analytic and numeric gradient of log marginal likelihood.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
 
@@ -110,7 +110,7 @@ def test_lml_gradient():
 
 
 def test_prior():
-    """ Test that GP prior has mean 0 and identical variances."""
+    # Test that GP prior has mean 0 and identical variances.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel)
 
@@ -125,7 +125,7 @@ def test_prior():
 
 
 def test_sample_statistics():
-    """ Test that statistics of samples drawn from GP are correct."""
+    # Test that statistics of samples drawn from GP are correct.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
 
@@ -140,14 +140,14 @@ def test_sample_statistics():
 
 
 def test_no_optimizer():
-    """ Test that kernel parameters are unmodified when optimizer is None."""
+    # Test that kernel parameters are unmodified when optimizer is None.
     kernel = RBF(1.0)
     gpr = GaussianProcessRegressor(kernel=kernel, optimizer=None).fit(X, y)
     assert_equal(np.exp(gpr.kernel_.theta), 1.0)
 
 
 def test_predict_cov_vs_std():
-    """ Test that predicted std.-dev. is consistent with cov's diagonal."""
+    # Test that predicted std.-dev. is consistent with cov's diagonal.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         y_mean, y_cov = gpr.predict(X2, return_cov=True)
@@ -156,7 +156,7 @@ def test_predict_cov_vs_std():
 
 
 def test_anisotropic_kernel():
-    """ Test that GPR can identify meaningful anisotropic length-scales. """
+    # Test that GPR can identify meaningful anisotropic length-scales.
     # We learn a function which varies in one dimension ten-times slower
     # than in the other. The corresponding length-scales should differ by at
    # least a factor 5
@@ -171,10 +171,8 @@ def test_anisotropic_kernel():
 
 
 def test_random_starts():
-    """
-    Test that an increasing number of random-starts of GP fitting only
-    increases the log marginal likelihood of the chosen theta.
-    """
+    # Test that an increasing number of random-starts of GP fitting only
+    # increases the log marginal likelihood of the chosen theta.
     n_samples, n_features = 25, 2
     np.random.seed(0)
     rng = np.random.RandomState(0)
@@ -197,11 +195,10 @@ def test_random_starts():
 
 
 def test_y_normalization():
-    """ Test normalization of the target values in GP
+    # Test normalization of the target values in GP
 
-    Fitting non-normalizing GP on normalized y and fitting normalizing GP
-    on unnormalized y should yield identical results
-    """
+    # Fitting non-normalizing GP on normalized y and fitting normalizing GP
+    # on unnormalized y should yield identical results
     y_mean = y.mean(0)
     y_norm = y - y_mean
     for kernel in kernels:
@@ -226,7 +223,7 @@ def test_y_normalization():
 
 
 def test_y_multioutput():
-    """ Test that GPR can deal with multi-dimensional target values"""
+    # Test that GPR can deal with multi-dimensional target values
    y_2d = np.vstack((y, y * 2)).T
 
     # Test for fixed kernel that first dimension of 2d GP equals the output
@@ -269,7 +266,7 @@ def test_y_multioutput():
 
 
 def test_custom_optimizer():
-    """ Test that GPR can use externally defined optimizers. """
+    # Test that GPR can use externally defined optimizers.
     # Define a dummy optimizer that simply tests 50 random hyperparameters
     def optimizer(obj_func, initial_theta, bounds):
         rng = np.random.RandomState(0)
@@ -294,7 +291,7 @@ def optimizer(obj_func, initial_theta, bounds):
 
 
 def test_duplicate_input():
-    """ Test GPR can handle two different output-values for the same input. """
+    # Test GPR can handle two different output-values for the same input.
     for kernel in kernels:
         gpr_equal_inputs = \
             GaussianProcessRegressor(kernel=kernel, alpha=1e-2)
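
For context on test_custom_optimizer in both files: the optimizer parameter accepts any callable with the signature shown in the diff, optimizer(obj_func, initial_theta, bounds). It receives the objective (the negative log-marginal likelihood, where obj_func(theta, eval_gradient=False) returns just the value), the starting theta, and the (log-space) bounds, and must return the tuple (theta_opt, func_min). A minimal sketch in the spirit of the dummy random-search optimizer the tests define; the body and toy data are illustrative, not the tests' exact code:

    import numpy as np
    from sklearn.gaussian_process import GaussianProcessRegressor
    from sklearn.gaussian_process.kernels import RBF


    def optimizer(obj_func, initial_theta, bounds):
        # Random search: evaluate 50 random thetas drawn inside the
        # (log-space) bounds and keep the best one seen.
        rng = np.random.RandomState(0)
        theta_opt = initial_theta
        func_min = obj_func(initial_theta, eval_gradient=False)
        for _ in range(50):
            theta = np.atleast_1d(rng.uniform(bounds[:, 0], bounds[:, 1]))
            f = obj_func(theta, eval_gradient=False)
            if f < func_min:
                theta_opt, func_min = theta, f
        return theta_opt, func_min


    # Illustrative usage on toy data.
    rng = np.random.RandomState(0)
    X = rng.uniform(0, 5, (30, 1))
    y = np.sin(X[:, 0])
    gpr = GaussianProcessRegressor(kernel=RBF(1.0),
                                   optimizer=optimizer).fit(X, y)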

0 commit comments