Commit 9f2dbb8

Scowley4 authored and jnothman committed
DOC Fix more formatting inconsistencies (scikit-learn#13787)
1 parent 4e34ea9 commit 9f2dbb8

File tree: 24 files changed (+49 additions, -49 deletions)
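For context, the inconsistencies fixed in this commit are numpydoc docstring underlines: the row of dashes under a section heading such as Parameters, Returns, Notes, or Examples should use hyphens (not equals signs) and be exactly as long as the heading itself. A minimal sketch of the convention the diff enforces (the function below is illustrative, not taken from the repository):

    def scaled(x):
        """Toy function showing numpydoc section underlines.

        Parameters
        ----------
        x : float
            Value to scale.

        Returns
        -------
        float
            Twice the input value.
        """
        return 2 * x

Here "Parameters" (10 characters) sits above exactly 10 hyphens and "Returns" (7 characters) above exactly 7, which is the pattern the removed/added lines below restore.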

sklearn/cluster/k_means_.py

Lines changed: 2 additions & 2 deletions
@@ -44,7 +44,7 @@ def _k_init(X, n_clusters, x_squared_norms, random_state, n_local_trials=None):
  """Init n_clusters seeds according to k-means++

  Parameters
- -----------
+ ----------
  X : array or sparse matrix, shape (n_samples, n_features)
  The data to pick seeds for. To avoid memory copy, the input data
  should be double precision (dtype=np.float64).

@@ -887,7 +887,7 @@ class KMeans(BaseEstimator, ClusterMixin, TransformerMixin):
  probably much faster than the default batch implementation.

  Notes
- ------
+ -----
  The k-means problem is solved using either Lloyd's or Elkan's algorithm.

  The average complexity is given by O(k n T), were n is the number of

sklearn/cluster/mean_shift_.py

Lines changed: 1 addition & 1 deletion
@@ -409,7 +409,7 @@ def fit(self, X, y=None):
  """Perform clustering.

  Parameters
- -----------
+ ----------
  X : array-like, shape=[n_samples, n_features]
  Samples to cluster.

sklearn/cluster/spectral.py

Lines changed: 3 additions & 3 deletions
@@ -173,7 +173,7 @@ def spectral_clustering(affinity, n_clusters=8, n_components=None,
  Read more in the :ref:`User Guide <spectral_clustering>`.

  Parameters
- -----------
+ ----------
  affinity : array-like or sparse matrix, shape: (n_samples, n_samples)
  The affinity matrix describing the relationship of the samples to
  embed. **Must be symmetric**.

@@ -240,7 +240,7 @@ def spectral_clustering(affinity, n_clusters=8, n_components=None,
  https://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf

  Notes
- ------
+ -----
  The graph should contain only one connect component, elsewhere
  the results make little sense.

@@ -298,7 +298,7 @@ class SpectralClustering(BaseEstimator, ClusterMixin):
  Read more in the :ref:`User Guide <spectral_clustering>`.

  Parameters
- -----------
+ ----------
  n_clusters : integer, optional
  The dimension of the projection subspace.

sklearn/datasets/base.py

Lines changed: 3 additions & 3 deletions
@@ -810,7 +810,7 @@ def load_sample_image(image_name):
  Read more in the :ref:`User Guide <sample_images>`.

  Parameters
- -----------
+ ----------
  image_name : {`china.jpg`, `flower.jpg`}
  The name of the sample image loaded

@@ -820,7 +820,7 @@ def load_sample_image(image_name):
  The image as a numpy array: height x width x color

  Examples
- ---------
+ --------

  >>> from sklearn.datasets import load_sample_image
  >>> china = load_sample_image('china.jpg') # doctest: +SKIP

@@ -895,7 +895,7 @@ def _fetch_remote(remote, dirname=None):
  downloaded file.

  Parameters
- -----------
+ ----------
  remote : RemoteFileMetadata
  Named tuple containing remote dataset meta information: url, filename
  and checksum

sklearn/datasets/california_housing.py

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ def fetch_california_housing(data_home=None, download_if_missing=True,
  .. versionadded:: 0.20

  Notes
- ------
+ -----

  This dataset consists of 20,640 samples and 9 features.
  """

sklearn/datasets/species_distributions.py

Lines changed: 1 addition & 1 deletion
@@ -154,7 +154,7 @@ def fetch_species_distributions(data_home=None,
  instead of trying to download the data from the source site.

  Returns
- --------
+ -------
  The data is returned as a Bunch object with the following attributes:

  coverages : array, shape = [14, 1592, 1212]

sklearn/ensemble/_hist_gradient_boosting/grower.py

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@ def __lt__(self, other_node):
  priority).

  Parameters
- -----------
+ ----------
  other_node : TreeNode
  The node to compare with.
  """

sklearn/ensemble/voting.py

Lines changed: 2 additions & 2 deletions
@@ -281,7 +281,7 @@ def predict(self, X):
  The input samples.

  Returns
- ----------
+ -------
  maj : array-like, shape (n_samples,)
  Predicted class labels.
  """

@@ -325,7 +325,7 @@ def predict_proba(self):
  The input samples.

  Returns
- ----------
+ -------
  avg : array-like, shape (n_samples, n_classes)
  Weighted average probability for each class per sample.
  """

sklearn/feature_extraction/image.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ def _make_edges_3d(n_x, n_y, n_z=1):
  """Returns a list of edges for a 3D image.

  Parameters
- ===========
+ ----------
  n_x : integer
  The size of the grid in the x direction.
  n_y : integer

sklearn/gaussian_process/kernels.py

Lines changed: 2 additions & 2 deletions
@@ -1156,7 +1156,7 @@ class RBF(StationaryKernelMixin, NormalizedKernelMixin, Kernel):
  .. versionadded:: 0.18

  Parameters
- -----------
+ ----------
  length_scale : float or array with shape (n_features,), default: 1.0
  The length scale of the kernel. If a float, an isotropic kernel is
  used. If an array, an anisotropic kernel is used where each dimension

@@ -1269,7 +1269,7 @@ class Matern(RBF):
  .. versionadded:: 0.18

  Parameters
- -----------
+ ----------
  length_scale : float or array with shape (n_features,), default: 1.0
  The length scale of the kernel. If a float, an isotropic kernel is
  used. If an array, an anisotropic kernel is used where each dimension

sklearn/linear_model/coordinate_descent.py

Lines changed: 3 additions & 3 deletions
@@ -224,7 +224,7 @@ def lasso_path(X, y, eps=1e-3, n_alphas=100, alphas=None,
  values output by lars_path

  Examples
- ---------
+ --------

  Comparing lasso_path and lars_path with interpolation:

@@ -661,7 +661,7 @@ def fit(self, X, y, check_input=True):
  """Fit model with coordinate descent.

  Parameters
- -----------
+ ----------
  X : ndarray or scipy.sparse matrix, (n_samples, n_features)
  Data

@@ -1747,7 +1747,7 @@ def fit(self, X, y):
  """Fit MultiTaskElasticNet model with coordinate descent

  Parameters
- -----------
+ ----------
  X : ndarray, shape (n_samples, n_features)
  Data
  y : ndarray, shape (n_samples, n_tasks)

sklearn/linear_model/least_angle.py

Lines changed: 8 additions & 8 deletions
@@ -42,7 +42,7 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0,
  Read more in the :ref:`User Guide <least_angle_regression>`.

  Parameters
- -----------
+ ----------
  X : None or array, shape (n_samples, n_features)
  Input data. Note that if X is None then the Gram matrix must be
  specified, i.e., cannot be None or False.

@@ -112,7 +112,7 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0,
  solution of the coordinate descent lasso_path function.

  Returns
- --------
+ -------
  alphas : array, shape (n_alphas + 1,)
  Maximum of covariances (in absolute value) at each iteration.
  ``n_alphas`` is either ``max_iter``, ``n_features`` or the

@@ -179,7 +179,7 @@ def lars_path_gram(Xy, Gram, n_samples, max_iter=500, alpha_min=0,
  Read more in the :ref:`User Guide <least_angle_regression>`.

  Parameters
- -----------
+ ----------
  Xy : array-like, shape (n_samples,) or (n_samples, n_targets)
  Xy = np.dot(X.T, y).

@@ -231,7 +231,7 @@ def lars_path_gram(Xy, Gram, n_samples, max_iter=500, alpha_min=0,
  solution of the coordinate descent lasso_path function.

  Returns
- --------
+ -------
  alphas : array, shape (n_alphas + 1,)
  Maximum of covariances (in absolute value) at each iteration.
  ``n_alphas`` is either ``max_iter``, ``n_features`` or the

@@ -295,7 +295,7 @@ def _lars_path_solver(X, y, Xy=None, Gram=None, n_samples=None, max_iter=500,
  Read more in the :ref:`User Guide <least_angle_regression>`.

  Parameters
- -----------
+ ----------
  X : None or ndarray, shape (n_samples, n_features)
  Input data. Note that if X is None then Gram must be specified,
  i.e., cannot be None or False.

@@ -358,7 +358,7 @@ def _lars_path_solver(X, y, Xy=None, Gram=None, n_samples=None, max_iter=500,
  solution of the coordinate descent lasso_path function.

  Returns
- --------
+ -------
  alphas : array, shape (n_alphas + 1,)
  Maximum of covariances (in absolute value) at each iteration.
  ``n_alphas`` is either ``max_iter``, ``n_features`` or the

@@ -1128,7 +1128,7 @@ def _lars_path_residues(X_train, y_train, X_test, y_test, Gram=None,
  """Compute the residues on left-out data for a full LARS path

  Parameters
- -----------
+ ----------
  X_train : array, shape (n_samples, n_features)
  The data to fit the LARS on

@@ -1189,7 +1189,7 @@ def _lars_path_residues(X_train, y_train, X_test, y_test, Gram=None,


  Returns
- --------
+ -------
  alphas : array, shape (n_alphas,)
  Maximum of covariances (in absolute value) at each iteration.
  ``n_alphas`` is either ``max_iter`` or ``n_features``, whichever

sklearn/linear_model/omp.py

Lines changed: 1 addition & 1 deletion
@@ -681,7 +681,7 @@ def _omp_path_residues(X_train, y_train, X_test, y_test, copy=True,
  """Compute the residues on left-out data for a full LARS path

  Parameters
- -----------
+ ----------
  X_train : array, shape (n_samples, n_features)
  The data to fit the LARS on

sklearn/manifold/isomap.py

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ def reconstruction_error(self):
  reconstruction_error : float

  Notes
- -------
+ -----
  The cost function of an isomap embedding is

  ``E = frobenius_norm[K(D) - K(D_fit)] / n_samples``

sklearn/manifold/spectral_embedding_.py

Lines changed: 1 addition & 1 deletion
@@ -348,7 +348,7 @@ class SpectralEmbedding(BaseEstimator):
  Read more in the :ref:`User Guide <spectral_embedding>`.

  Parameters
- -----------
+ ----------
  n_components : integer, default: 2
  The dimension of the projected subspace.

sklearn/metrics/pairwise.py

Lines changed: 1 addition & 1 deletion
@@ -749,7 +749,7 @@ def paired_cosine_distances(X, Y):
  distances : ndarray, shape (n_samples, )

  Notes
- ------
+ -----
  The cosine distance is equivalent to the half the squared
  euclidean distance if each sample is normalized to unit norm
  """

sklearn/model_selection/_search.py

Lines changed: 7 additions & 7 deletions
@@ -447,7 +447,7 @@ def predict(self, X):
  ``predict``.

  Parameters
- -----------
+ ----------
  X : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -464,7 +464,7 @@ def predict_proba(self, X):
  ``predict_proba``.

  Parameters
- -----------
+ ----------
  X : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -481,7 +481,7 @@ def predict_log_proba(self, X):
  ``predict_log_proba``.

  Parameters
- -----------
+ ----------
  X : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -498,7 +498,7 @@ def decision_function(self, X):
  ``decision_function``.

  Parameters
- -----------
+ ----------
  X : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -515,7 +515,7 @@ def transform(self, X):
  ``refit=True``.

  Parameters
- -----------
+ ----------
  X : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -532,7 +532,7 @@ def inverse_transform(self, Xt):
  ``inverse_transform`` and ``refit=True``.

  Parameters
- -----------
+ ----------
  Xt : indexable, length n_samples
  Must fulfill the input assumptions of the
  underlying estimator.

@@ -1103,7 +1103,7 @@ class GridSearchCV(BaseSearchCV):
  This is present only if ``refit`` is not False.

  Notes
- ------
+ -----
  The parameters selected are those that maximize the score of the left out
  data, unless an explicit score is passed in which case it is used instead.

sklearn/model_selection/_split.py

Lines changed: 1 addition & 1 deletion
@@ -853,7 +853,7 @@ class LeaveOneGroupOut(BaseCrossValidator):
  >>> logo = LeaveOneGroupOut()
  >>> logo.get_n_splits(X, y, groups)
  2
- >>> logo.get_n_splits(groups=groups) # 'groups' is always required
+ >>> logo.get_n_splits(groups=groups)  # 'groups' is always required
  2
  >>> print(logo)
  LeaveOneGroupOut()

sklearn/neighbors/base.py

Lines changed: 2 additions & 2 deletions
@@ -62,14 +62,14 @@ def _get_weights(dist, weights):
  """Get the weights from an array of distances and a parameter ``weights``

  Parameters
- ===========
+ ----------
  dist : ndarray
  The input distances
  weights : {'uniform', 'distance' or a callable}
  The kind of weighting used

  Returns
- ========
+ -------
  weights_arr : array of the same shape as ``dist``
  if ``weights == 'uniform'``, then returns None
  """

sklearn/preprocessing/data.py

Lines changed: 2 additions & 2 deletions
@@ -1683,7 +1683,7 @@ class Normalizer(BaseEstimator, TransformerMixin):
  >>> X = [[4, 1, 2, 2],
  ... [1, 3, 9, 3],
  ... [5, 7, 5, 1]]
- >>> transformer = Normalizer().fit(X) # fit does nothing.
+ >>> transformer = Normalizer().fit(X)  # fit does nothing.
  >>> transformer
  Normalizer(copy=True, norm='l2')
  >>> transformer.transform(X)

@@ -1819,7 +1819,7 @@ class Binarizer(BaseEstimator, TransformerMixin):
  >>> X = [[ 1., -1., 2.],
  ... [ 2., 0., 0.],
  ... [ 0., 1., -1.]]
- >>> transformer = Binarizer().fit(X) # fit does nothing.
+ >>> transformer = Binarizer().fit(X)  # fit does nothing.
  >>> transformer
  Binarizer(copy=True, threshold=0.0)
  >>> transformer.transform(X)
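Mismatches like the ones fixed above can also be caught mechanically. The following is a minimal sketch of such a check, not part of this commit; the script, its name, and the list of headings it knows about are illustrative assumptions. It flags any known numpydoc heading whose underline is not a run of hyphens of exactly the heading's length:

    import re
    import sys

    # numpydoc section headings this sketch knows about (an illustrative subset)
    HEADINGS = ("Parameters", "Returns", "Notes", "Examples", "Attributes")


    def check_file(path):
        """Print a message for each heading whose underline is malformed."""
        with open(path) as fh:
            lines = fh.read().splitlines()
        for i, line in enumerate(lines[:-1]):
            heading = line.strip()
            underline = lines[i + 1].strip()
            # Only consider heading/underline pairs: a known heading followed
            # by a line made entirely of '-' or '=' characters.
            if heading in HEADINGS and re.fullmatch(r"[-=]+", underline):
                if underline != "-" * len(heading):
                    print("%s:%d: expected %r under %r, found %r"
                          % (path, i + 2, "-" * len(heading), heading, underline))


    if __name__ == "__main__":
        for path in sys.argv[1:]:
            check_file(path)

Run against the pre-commit sources (for example, a hypothetical ``python check_underlines.py sklearn/cluster/k_means_.py``), it would flag the 11-dash underline under ``Parameters`` that the first hunk above removes.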
