@@ -36,7 +36,7 @@ def f(x):


 def test_gpr_interpolation():
-    """ Test the interpolating property for different kernels."""
+    # Test the interpolating property for different kernels.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         y_pred, y_cov = gpr.predict(X, return_cov=True)
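The interpolating property checked above is that a noise-free GP posterior passes exactly through the training targets, with numerically zero posterior variance at the training points. A minimal self-contained sketch of that check, using illustrative data in place of this file's module-level X, y, and kernels fixtures:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

# Illustrative 1-D training data; the real test loops over several kernels.
X = np.atleast_2d(np.linspace(0, 5, 7)).T
y = np.sin(X).ravel()

gpr = GaussianProcessRegressor(kernel=RBF(length_scale=1.0)).fit(X, y)
y_pred, y_cov = gpr.predict(X, return_cov=True)

# Without observation noise the posterior mean interpolates the data and
# the posterior covariance at the training points is numerically zero.
assert np.allclose(y_pred, y)
assert np.allclose(np.diag(y_cov), 0, atol=1e-8)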
@@ -46,7 +46,7 @@ def test_gpr_interpolation():


 def test_lml_improving():
-    """ Test that hyperparameter-tuning improves log-marginal likelihood. """
+    # Test that hyperparameter-tuning improves log-marginal likelihood.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
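The improvement property means the optimized hyperparameters score at least as well as the initial ones under the log-marginal likelihood. A hedged sketch of that comparison, with illustrative data and a deliberately off initial length-scale (not this file's fixtures):

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X = np.atleast_2d(np.linspace(0, 5, 7)).T
y = np.sin(X).ravel()
kernel = RBF(length_scale=2.0)  # deliberately suboptimal starting value

gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
# Tuned theta must not score worse than the initial theta.
assert (gpr.log_marginal_likelihood(gpr.kernel_.theta)
        >= gpr.log_marginal_likelihood(kernel.theta))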
@@ -56,15 +56,15 @@ def test_lml_improving():


 def test_lml_precomputed():
-    """ Test that lml of optimized kernel is stored correctly. """
+    # Test that lml of optimized kernel is stored correctly.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         assert_equal(gpr.log_marginal_likelihood(gpr.kernel_.theta),
                      gpr.log_marginal_likelihood())


 def test_converged_to_local_maximum():
-    """ Test that we are in local maximum after hyperparameter-optimization."""
+    # Test that we are in local maximum after hyperparameter-optimization.
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -79,7 +79,7 @@ def test_converged_to_local_maximum():


 def test_solution_inside_bounds():
-    """ Test that hyperparameter-optimization remains in bounds"""
+    # Test that hyperparameter-optimization remains in bounds
     for kernel in kernels:
         if kernel == fixed_kernel:
             continue
@@ -95,7 +95,7 @@ def test_solution_inside_bounds():


 def test_lml_gradient():
-    """ Compare analytic and numeric gradient of log marginal likelihood. """
+    # Compare analytic and numeric gradient of log marginal likelihood.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)

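The gradient comparison boils down to checking the analytic gradient returned by log_marginal_likelihood(theta, eval_gradient=True) against central finite differences. A hedged, self-contained sketch of that idea; the data, kernel, and step size are illustrative, and the optimizer is disabled so theta is not already at a stationary point:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X = np.atleast_2d(np.linspace(0, 5, 7)).T
y = np.sin(X).ravel()

gpr = GaussianProcessRegressor(kernel=RBF(length_scale=1.0),
                               optimizer=None).fit(X, y)
theta = gpr.kernel_.theta  # log-transformed hyperparameters

# Analytic gradient reported by the model.
lml, lml_gradient = gpr.log_marginal_likelihood(theta, eval_gradient=True)

# Central finite differences, one hyperparameter at a time.
eps = 1e-6
numeric = np.empty_like(theta)
for i in range(len(theta)):
    step = np.zeros_like(theta)
    step[i] = eps
    numeric[i] = (gpr.log_marginal_likelihood(theta + step)
                  - gpr.log_marginal_likelihood(theta - step)) / (2 * eps)

assert np.allclose(lml_gradient, numeric, rtol=1e-5, atol=1e-6)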
@@ -110,7 +110,7 @@ def test_lml_gradient():


 def test_prior():
-    """ Test that GP prior has mean 0 and identical variances."""
+    # Test that GP prior has mean 0 and identical variances.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel)

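Note that predict can be called on an unfitted regressor, in which case the GP prior is returned: mean zero, and for an RBF kernel every prior variance equals k(x, x) = 1. A small illustrative sketch of that behavior:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X = np.atleast_2d(np.linspace(0, 5, 7)).T

# Unfitted regressor: predict returns the GP prior.
gpr = GaussianProcessRegressor(kernel=RBF(length_scale=1.0))
y_mean, y_cov = gpr.predict(X, return_cov=True)

assert np.allclose(y_mean, 0)            # prior mean is zero
assert np.allclose(np.diag(y_cov), 1.0)  # RBF gives k(x, x) = 1 everywhere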
@@ -125,7 +125,7 @@ def test_prior():


 def test_sample_statistics():
-    """ Test that statistics of samples drawn from GP are correct."""
+    # Test that statistics of samples drawn from GP are correct.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)

@@ -140,14 +140,14 @@ def test_sample_statistics():


 def test_no_optimizer():
-    """ Test that kernel parameters are unmodified when optimizer is None."""
+    # Test that kernel parameters are unmodified when optimizer is None.
     kernel = RBF(1.0)
     gpr = GaussianProcessRegressor(kernel=kernel, optimizer=None).fit(X, y)
     assert_equal(np.exp(gpr.kernel_.theta), 1.0)


 def test_predict_cov_vs_std():
-    """ Test that predicted std.-dev. is consistent with cov's diagonal."""
+    # Test that predicted std.-dev. is consistent with cov's diagonal.
     for kernel in kernels:
         gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
         y_mean, y_cov = gpr.predict(X2, return_cov=True)
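The consistency check here amounts to verifying that the standard deviation returned with return_std=True matches the square root of the covariance diagonal returned with return_cov=True (the two cannot be requested in a single predict call). A minimal sketch with illustrative data in place of this file's X2 fixture; the query points are kept away from the training points so the variances are safely positive:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X = np.atleast_2d(np.linspace(0, 5, 7)).T
y = np.sin(X).ravel()
X2 = np.atleast_2d(np.linspace(0.4, 4.6, 6)).T  # illustrative query points

gpr = GaussianProcessRegressor(kernel=RBF(length_scale=1.0)).fit(X, y)
y_mean, y_cov = gpr.predict(X2, return_cov=True)
y_mean2, y_std = gpr.predict(X2, return_std=True)

assert np.allclose(y_mean, y_mean2)
assert np.allclose(np.sqrt(np.diag(y_cov)), y_std, atol=1e-7)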
@@ -156,7 +156,7 @@ def test_predict_cov_vs_std():


 def test_anisotropic_kernel():
-    """ Test that GPR can identify meaningful anisotropic length-scales. """
+    # Test that GPR can identify meaningful anisotropic length-scales.
     # We learn a function which varies in one dimension ten-times slower
     # than in the other. The corresponding length-scales should differ by at
     # least a factor 5
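A hedged sketch of the anisotropic setup that comment describes, with an assumed dataset (the test's exact data are not shown in this hunk): a target that depends ten times more weakly on the second feature should yield a correspondingly larger learned length-scale for it.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.RandomState(0)
X = rng.uniform(-1, 1, (50, 2))
y = X[:, 0] + 0.1 * X[:, 1]  # ten-times weaker dependence on feature 1

kernel = RBF([1.0, 1.0])  # anisotropic: one length-scale per feature
gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)

length_scales = np.exp(gpr.kernel_.theta)
assert length_scales[1] > 5 * length_scales[0]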
@@ -171,10 +171,8 @@ def test_anisotropic_kernel():


 def test_random_starts():
-    """
-    Test that an increasing number of random-starts of GP fitting only
-    increases the log marginal likelihood of the chosen theta.
-    """
+    # Test that an increasing number of random-starts of GP fitting only
+    # increases the log marginal likelihood of the chosen theta.
     n_samples, n_features = 25, 2
     np.random.seed(0)
     rng = np.random.RandomState(0)
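The monotonicity claim holds because, with a fixed random seed, the starting points tried for n restarts are a subset of those tried for n + 1 restarts, so the best log-marginal likelihood found can only go up. A hedged sketch of that idea (dataset and restart counts are illustrative):

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.RandomState(0)
X = rng.randn(25, 2)
y = np.sin(X[:, 0]) + np.cos(X[:, 1])

last_lml = -np.inf
for n_restarts in range(4):
    gpr = GaussianProcessRegressor(kernel=RBF([1.0, 1.0]),
                                   n_restarts_optimizer=n_restarts,
                                   random_state=0).fit(X, y)
    lml = gpr.log_marginal_likelihood(gpr.kernel_.theta)
    assert lml >= last_lml - 1e-7  # never worse with more restarts
    last_lml = lml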
@@ -197,11 +195,10 @@ def test_random_starts():


 def test_y_normalization():
-    """ Test normalization of the target values in GP
+    # Test normalization of the target values in GP

-    Fitting non-normalizing GP on normalized y and fitting normalizing GP
-    on unnormalized y should yield identical results
-    """
+    # Fitting non-normalizing GP on normalized y and fitting normalizing GP
+    # on unnormalized y should yield identical results
     y_mean = y.mean(0)
     y_norm = y - y_mean
     for kernel in kernels:
@@ -226,7 +223,7 @@ def test_y_normalization():


 def test_y_multioutput():
-    """ Test that GPR can deal with multi-dimensional target values"""
+    # Test that GPR can deal with multi-dimensional target values
     y_2d = np.vstack((y, y * 2)).T

     # Test for fixed kernel that first dimension of 2d GP equals the output
@@ -269,7 +266,7 @@ def test_y_multioutput():


 def test_custom_optimizer():
-    """ Test that GPR can use externally defined optimizers. """
+    # Test that GPR can use externally defined optimizers.
     # Define a dummy optimizer that simply tests 50 random hyperparameters
     def optimizer(obj_func, initial_theta, bounds):
         rng = np.random.RandomState(0)
@@ -294,7 +291,7 @@ def optimizer(obj_func, initial_theta, bounds):


 def test_duplicate_input():
-    """ Test GPR can handle two different output-values for the same input. """
+    # Test GPR can handle two different output-values for the same input.
     for kernel in kernels:
         gpr_equal_inputs = \
             GaussianProcessRegressor(kernel=kernel, alpha=1e-2)
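On the custom-optimizer hunk above: GaussianProcessRegressor accepts any callable optimizer(obj_func, initial_theta, bounds) that returns (theta_opt, func_min), where obj_func evaluates the negative log-marginal likelihood over log-scale hyperparameters. A hedged sketch completing the random-search idea whose first lines appear in the diff; the data and the clamped search range are illustrative:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X = np.atleast_2d(np.linspace(0, 5, 7)).T
y = np.sin(X).ravel()

def optimizer(obj_func, initial_theta, bounds):
    # Dummy optimizer: evaluate 50 random log-scale hyperparameter vectors
    # and keep the best; obj_func returns the negative log-marginal
    # likelihood when called with eval_gradient=False.
    rng = np.random.RandomState(0)
    theta_opt = initial_theta
    func_min = obj_func(initial_theta, eval_gradient=False)
    for _ in range(50):
        theta = np.atleast_1d(rng.uniform(np.maximum(-2, bounds[:, 0]),
                                          np.minimum(1, bounds[:, 1])))
        f = obj_func(theta, eval_gradient=False)
        if f < func_min:
            theta_opt, func_min = theta, f
    return theta_opt, func_min

gpr = GaussianProcessRegressor(kernel=RBF(1.0), optimizer=optimizer).fit(X, y)
print(np.exp(gpr.kernel_.theta))  # best length-scale found by random search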