Skip to content

Commit 89b80a6

Browse files
pwalchessen, soccergirl34, glemaitre
authored
DOC add missing attributes in several modules (#15521)
Co-authored-by: Patricia Walchessen <pwalchessen@gmail.com> Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent c298cb7 commit 89b80a6

File tree

8 files changed

+238
-138
lines changed

8 files changed

+238
-138
lines changed

sklearn/feature_selection/_rfe.py

+25-19
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ class RFE(SelectorMixin, MetaEstimatorMixin, BaseEstimator):
5252
5353
Parameters
5454
----------
55-
estimator : object
55+
estimator : estimator instance
5656
A supervised learning estimator with a ``fit`` method that provides
5757
information about feature importance
5858
(e.g. `coef_`, `feature_importances_`).
@@ -89,19 +89,22 @@ class RFE(SelectorMixin, MetaEstimatorMixin, BaseEstimator):
8989
9090
Attributes
9191
----------
92+
classes_ : ndarray of shape (n_classes,)
93+
Unique class labels.
94+
95+
estimator_ : estimator instance
96+
The fitted estimator used to select features.
97+
9298
n_features_ : int
9399
The number of selected features.
94100
95-
support_ : array of shape [n_features]
96-
The mask of selected features.
97-
98-
ranking_ : array of shape [n_features]
101+
ranking_ : ndarray of shape (n_features,)
99102
The feature ranking, such that ``ranking_[i]`` corresponds to the
100103
ranking position of the i-th feature. Selected (i.e., estimated
101104
best) features are assigned rank 1.
102105
103-
estimator_ : object
104-
The external estimator fit on the reduced dataset.
106+
support_ : ndarray of shape (n_features,)
107+
The mask of selected features.
105108
106109
Examples
107110
--------
@@ -363,7 +366,7 @@ class RFECV(RFE):
363366
364367
Parameters
365368
----------
366-
estimator : object
369+
estimator : estimator instance
367370
A supervised learning estimator with a ``fit`` method that provides
368371
information about feature importance either through a ``coef_``
369372
attribute or through a ``feature_importances_`` attribute.
@@ -439,26 +442,29 @@ class RFECV(RFE):
439442
440443
Attributes
441444
----------
445+
classes_ : ndarray of shape (n_classes,)
446+
Unique class labels.
447+
448+
estimator_ : estimator instance
449+
The fitted estimator used to select features.
450+
451+
grid_scores_ : ndarray of shape (n_subsets_of_features,)
452+
The cross-validation scores such that
453+
``grid_scores_[i]`` corresponds to
454+
the CV score of the i-th subset of features.
455+
442456
n_features_ : int
443457
The number of selected features with cross-validation.
444458
445-
support_ : array of shape [n_features]
446-
The mask of selected features.
447-
448-
ranking_ : array of shape [n_features]
459+
ranking_ : ndarray of shape (n_features,)
449460
The feature ranking, such that `ranking_[i]`
450461
corresponds to the ranking
451462
position of the i-th feature.
452463
Selected (i.e., estimated best)
453464
features are assigned rank 1.
454465
455-
grid_scores_ : array of shape [n_subsets_of_features]
456-
The cross-validation scores such that
457-
``grid_scores_[i]`` corresponds to
458-
the CV score of the i-th subset of features.
459-
460-
estimator_ : object
461-
The external estimator fit on the reduced dataset.
466+
support_ : ndarray of shape (n_features,)
467+
The mask of selected features.
462468
463469
Notes
464470
-----

sklearn/linear_model/_perceptron.py

+10-6
Original file line numberDiff line numberDiff line change
@@ -97,20 +97,24 @@ class Perceptron(BaseSGDClassifier):
9797
9898
Attributes
9999
----------
100-
coef_ : ndarray of shape = [1, n_features] if n_classes == 2 else \
101-
[n_classes, n_features]
100+
classes_ : ndarray of shape (n_classes,)
101+
The unique class labels.
102+
103+
coef_ : ndarray of shape (1, n_features) if n_classes == 2 else \
104+
(n_classes, n_features)
102105
Weights assigned to the features.
103106
104-
intercept_ : ndarray of shape = [1] if n_classes == 2 else [n_classes]
107+
intercept_ : ndarray of shape (1,) if n_classes == 2 else (n_classes,)
105108
Constants in decision function.
106109
110+
loss_function_ : concrete LossFunction
111+
The function that determines the loss, or difference between the
112+
output of the algorithm and the target values.
113+
107114
n_iter_ : int
108115
The actual number of iterations to reach the stopping criterion.
109116
For multiclass fits, it is the maximum over every binary fit.
110117
111-
classes_ : ndarray of shape (n_classes,)
112-
The unique classes labels.
113-
114118
t_ : int
115119
Number of weight updates performed during training.
116120
Same as ``(n_iter_ * n_samples)``.

sklearn/neighbors/_classification.py

+11-1
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,9 @@ class KNeighborsClassifier(NeighborsBase, KNeighborsMixin,
104104
`p` parameter value if the `effective_metric_` attribute is set to
105105
'minkowski'.
106106
107+
n_samples_fit_ : int
108+
Number of samples in the fitted data.
109+
107110
outputs_2d_ : bool
108111
False when `y`'s shape is (n_samples, ) or (n_samples, 1) during fit
109112
otherwise True.
@@ -344,6 +347,13 @@ class RadiusNeighborsClassifier(NeighborsBase, RadiusNeighborsMixin,
344347
`p` parameter value if the `effective_metric_` attribute is set to
345348
'minkowski'.
346349
350+
n_samples_fit_ : int
351+
Number of samples in the fitted data.
352+
353+
outlier_label_ : int or array-like of shape (n_class,)
354+
Label which is given for outlier samples (samples with no neighbors
355+
on given radius).
356+
347357
outputs_2d_ : bool
348358
False when `y`'s shape is (n_samples, ) or (n_samples, 1) during fit
349359
otherwise True.
@@ -419,7 +429,7 @@ def fit(self, X, y):
419429

420430
elif self.outlier_label == 'most_frequent':
421431
outlier_label_ = []
422-
# iterate over multi-output, get the most frequest label for each
432+
# iterate over multi-output, get the most frequent label for each
423433
# output.
424434
for k, classes_k in enumerate(classes_):
425435
label_count = np.bincount(_y[:, k])

sklearn/neighbors/_graph.py

+32
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,22 @@ class KNeighborsTransformer(KNeighborsMixin, UnsupervisedMixin,
275275
The number of parallel jobs to run for neighbors search.
276276
If ``-1``, then the number of jobs is set to the number of CPU cores.
277277
278+
Attributes
279+
----------
280+
effective_metric_ : str or callable
281+
The distance metric used. It will be the same as the `metric` parameter
282+
or a synonym of it, e.g. 'euclidean' if the `metric` parameter is set to
283+
'minkowski' and the `p` parameter set to 2.
284+
285+
effective_metric_params_ : dict
286+
Additional keyword arguments for the metric function. For most metrics
287+
will be same with `metric_params` parameter, but may also contain the
288+
`p` parameter value if the `effective_metric_` attribute is set to
289+
'minkowski'.
290+
291+
n_samples_fit_ : int
292+
Number of samples in the fitted data.
293+
278294
Examples
279295
--------
280296
>>> from sklearn.manifold import Isomap
@@ -417,6 +433,22 @@ class RadiusNeighborsTransformer(RadiusNeighborsMixin, UnsupervisedMixin,
417433
The number of parallel jobs to run for neighbors search.
418434
If ``-1``, then the number of jobs is set to the number of CPU cores.
419435
436+
Attributes
437+
----------
438+
effective_metric_ : str or callable
439+
The distance metric used. It will be same as the `metric` parameter
440+
or a synonym of it, e.g. 'euclidean' if the `metric` parameter set to
441+
'minkowski' and `p` parameter set to 2.
442+
443+
effective_metric_params_ : dict
444+
Additional keyword arguments for the metric function. For most metrics
445+
will be same with `metric_params` parameter, but may also contain the
446+
`p` parameter value if the `effective_metric_` attribute is set to
447+
'minkowski'.
448+
449+
n_samples_fit_ : int
450+
Number of samples in the fitted data.
451+
420452
Examples
421453
--------
422454
>>> from sklearn.cluster import DBSCAN

sklearn/neighbors/_regression.py

+6
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,9 @@ class KNeighborsRegressor(NeighborsBase, KNeighborsMixin,
107107
`p` parameter value if the `effective_metric_` attribute is set to
108108
'minkowski'.
109109
110+
n_samples_fit_ : int
111+
Number of samples in the fitted data.
112+
110113
Examples
111114
--------
112115
>>> X = [[0], [1], [2], [3]]
@@ -283,6 +286,9 @@ class RadiusNeighborsRegressor(NeighborsBase, RadiusNeighborsMixin,
283286
`p` parameter value if the `effective_metric_` attribute is set to
284287
'minkowski'.
285288
289+
n_samples_fit_ : int
290+
Number of samples in the fitted data.
291+
286292
Examples
287293
--------
288294
>>> X = [[0], [1], [2], [3]]

sklearn/neighbors/_unsupervised.py

+19-14
Original file line numberDiff line numberDiff line change
@@ -73,22 +73,27 @@ class NearestNeighbors(KNeighborsMixin, RadiusNeighborsMixin,
7373
effective_metric_params_ : dict
7474
Parameters for the metric used to compute distances to neighbors.
7575
76+
n_samples_fit_ : int
77+
Number of samples in the fitted data.
78+
7679
Examples
7780
--------
78-
>>> import numpy as np
79-
>>> from sklearn.neighbors import NearestNeighbors
80-
>>> samples = [[0, 0, 2], [1, 0, 0], [0, 0, 1]]
81-
82-
>>> neigh = NearestNeighbors(n_neighbors=2, radius=0.4)
83-
>>> neigh.fit(samples)
84-
NearestNeighbors(...)
85-
86-
>>> neigh.kneighbors([[0, 0, 1.3]], 2, return_distance=False)
87-
array([[2, 0]]...)
88-
89-
>>> nbrs = neigh.radius_neighbors([[0, 0, 1.3]], 0.4, return_distance=False)
90-
>>> np.asarray(nbrs[0][0])
91-
array(2)
81+
>>> import numpy as np
82+
>>> from sklearn.neighbors import NearestNeighbors
83+
>>> samples = [[0, 0, 2], [1, 0, 0], [0, 0, 1]]
84+
85+
>>> neigh = NearestNeighbors(n_neighbors=2, radius=0.4)
86+
>>> neigh.fit(samples)
87+
NearestNeighbors(...)
88+
89+
>>> neigh.kneighbors([[0, 0, 1.3]], 2, return_distance=False)
90+
array([[2, 0]]...)
91+
92+
>>> nbrs = neigh.radius_neighbors(
93+
... [[0, 0, 1.3]], 0.4, return_distance=False
94+
... )
95+
>>> np.asarray(nbrs[0][0])
96+
array(2)
9297
9398
See also
9499
--------

0 commit comments

Comments
 (0)