Skip to content

MAINT: consistent use of print(__doc__) in examples #21307

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits
Oct 22, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions examples/applications/plot_cyclical_feature_engineering.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
`extrapolation="periodic"` option.

"""

# %%
# Data exploration on the Bike Sharing Demand dataset
# ---------------------------------------------------
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/plot_digits_denoising.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,8 @@
"Learning to find pre-images."
Advances in neural information processing systems 16 (2004): 449-456.
<https://papers.nips.cc/paper/2003/file/ac1ad983e08ad3304a97e147f522747e-Paper.pdf>`_
"""

print(__doc__)
"""

# Authors: Guillaume Lemaitre <guillaume.lemaitre@inria.fr>
# Licence: BSD 3 clause
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/plot_face_recognition.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
================== ============ ======= ========== =======

"""

from time import time
import logging
import matplotlib.pyplot as plt
Expand All @@ -40,8 +41,6 @@
from sklearn.svm import SVC


print(__doc__)

# Display progress logs on stdout
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")

Expand Down
2 changes: 0 additions & 2 deletions examples/applications/plot_model_complexity_influence.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@

"""

print(__doc__)

# Authors: Eustache Diemert <eustache@diemert.fr>
# Maria Telenczuk <https://github.com/maikia>
# Guillaume Lemaitre <g.lemaitre58@gmail.com>
Expand Down
1 change: 1 addition & 0 deletions examples/applications/plot_out_of_core_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
HashingVectorizer that will project each example into the same feature space.
This is especially useful in the case of text classification where new
features (words) may appear in each batch.

"""

# Authors: Eustache Diemert <eustache@diemert.fr>
Expand Down
1 change: 0 additions & 1 deletion examples/applications/plot_outlier_detection_wine.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
example, none of these could be applied that easily.

"""
print(__doc__)

# Author: Virgile Fritsch <virgile.fritsch@inria.fr>
# License: BSD 3 clause
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/plot_species_distribution_modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
<http://rob.schapire.net/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.

"""

# Authors: Peter Prettenhofer <peter.prettenhofer@gmail.com>
Expand All @@ -58,8 +59,6 @@
except ImportError:
basemap = False

print(__doc__)


def construct_grids(batch):
"""Construct the map grid from the batch object
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/plot_stock_market.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@
is to position the labels minimizing overlap. For this we use an
heuristic based on the direction of the nearest neighbor along each
axis.

"""

# Author: Gael Varoquaux gael.varoquaux@normalesup.org
Expand All @@ -73,8 +74,6 @@

from sklearn import cluster, covariance, manifold

print(__doc__)


# #############################################################################
# Retrieve the data from Internet
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/plot_tomography_l1_reconstruction.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,8 @@ class :class:`~sklearn.linear_model.Lasso`, that uses the coordinate descent
reconstructed image, contrary to the L1 penalization. Note in particular
the circular artifact separating the pixels in the corners, that have
contributed to fewer projections than the central disk.
"""

print(__doc__)
"""

# Author: Emmanuelle Gouillart <emmanuelle.gouillart@nsup.org>
# License: BSD 3 clause
Expand Down
2 changes: 0 additions & 2 deletions examples/applications/svm_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@

"""

print(__doc__)

# Author: Peter Prettenhoer <peter.prettenhofer@gmail.com>
#
# License: BSD 3 clause
Expand Down
3 changes: 1 addition & 2 deletions examples/applications/wikipedia_principal_eigenvector.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@

The graph data is fetched from the DBpedia dumps. DBpedia is an extraction
of the latent structured data of the Wikipedia content.

"""

# Author: Olivier Grisel <olivier.grisel@ensta.org>
Expand All @@ -46,8 +47,6 @@
from urllib.request import urlopen


print(__doc__)

# #############################################################################
# Where to download the data, if not already on disk
redirects_url = "http://downloads.dbpedia.org/3.5.1/en/redirects_en.nt.bz2"
Expand Down
3 changes: 1 addition & 2 deletions examples/bicluster/plot_bicluster_newsgroups.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
achieve a better V-measure than clusters found by MiniBatchKMeans.

"""

from collections import defaultdict
import operator
from time import time
Expand All @@ -34,8 +35,6 @@
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.cluster import v_measure_score

print(__doc__)


def number_normalizer(tokens):
"""Map all numeric tokens to a placeholder.
Expand Down
1 change: 0 additions & 1 deletion examples/bicluster/plot_spectral_biclustering.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
representation of the checkerboard structure.

"""
print(__doc__)

# Author: Kemal Eren <kemal@kemaleren.com>
# License: BSD 3 clause
Expand Down
1 change: 0 additions & 1 deletion examples/bicluster/plot_spectral_coclustering.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
the biclusters.

"""
print(__doc__)

# Author: Kemal Eren <kemal@kemaleren.com>
# License: BSD 3 clause
Expand Down
2 changes: 1 addition & 1 deletion examples/calibration/plot_calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
to the expected 0.5 for most of the samples belonging to the middle
cluster with heterogeneous labels. This results in a significantly improved
Brier score.

"""
print(__doc__)

# Author: Mathieu Blondel <mathieu@mblondel.org>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
Expand Down
4 changes: 2 additions & 2 deletions examples/calibration/plot_calibration_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@
visualize how well calibrated the predicted probabilities are using calibration
curves, also known as reliability diagrams. Calibration of an uncalibrated
classifier will also be demonstrated.

"""
print(__doc__)
# %%

# %%
# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# License: BSD 3 clause.
Expand Down
1 change: 1 addition & 0 deletions examples/calibration/plot_calibration_multiclass.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
classifier to the probability vectors predicted by the same classifier after
sigmoid calibration on a hold-out validation set. Colors indicate the true
class of an instance (red: class 1, green: class 2, blue: class 3).

"""

# %%
Expand Down
1 change: 1 addition & 0 deletions examples/calibration/plot_compare_calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
models: :ref:`Logistic_regression`, :ref:`gaussian_naive_bayes`,
:ref:`Random Forest Classifier <forest>` and :ref:`Linear SVM
<svm_classification>`.

"""

# %%
Expand Down
2 changes: 1 addition & 1 deletion examples/classification/plot_classification_probability.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
The logistic regression with One-Vs-Rest is not a multiclass classifier out of
the box. As a result it has more trouble in separating class 2 and 3 than the
other estimators.

"""
print(__doc__)

# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD 3 clause
Expand Down
3 changes: 1 addition & 2 deletions examples/classification/plot_classifier_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,8 @@
The plots show training points in solid colors and testing points
semi-transparent. The lower right shows the classification accuracy on the test
set.
"""
print(__doc__)

"""

# Code source: Gaël Varoquaux
# Andreas Müller
Expand Down
3 changes: 1 addition & 2 deletions examples/classification/plot_digits_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,8 @@

This example shows how scikit-learn can be used to recognize images of
hand-written digits, from 0-9.
"""

print(__doc__)
"""

# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# License: BSD 3 clause
Expand Down
2 changes: 2 additions & 0 deletions examples/classification/plot_lda.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@

This example illustrates how the Ledoit-Wolf and Oracle Shrinkage
Approximating (OAS) estimators of covariance can improve classification.

"""

import numpy as np
import matplotlib.pyplot as plt

Expand Down
2 changes: 1 addition & 1 deletion examples/classification/plot_lda_qda.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
the double standard deviation for each class. With LDA, the
standard deviation is the same for all the classes, while each
class has its own standard deviation with QDA.

"""
print(__doc__)

from scipy import linalg
import numpy as np
Expand Down
1 change: 0 additions & 1 deletion examples/cluster/plot_adjusted_for_chance_measures.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
value of k on various overlapping sub-samples of the dataset.

"""
print(__doc__)

# Author: Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
Expand Down
1 change: 0 additions & 1 deletion examples/cluster/plot_affinity_propagation.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
Between Data Points", Science Feb. 2007

"""
print(__doc__)

from sklearn.cluster import AffinityPropagation
from sklearn import metrics
Expand Down
5 changes: 4 additions & 1 deletion examples/cluster/plot_agglomerative_clustering.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,10 @@
(try decreasing the number of neighbors in kneighbors_graph) and with
complete linkage. In particular, having a very small number of neighbors in
the graph, imposes a geometry that is close to that of single linkage,
which is well known to have this percolation instability. """
which is well known to have this percolation instability.

"""

# Authors: Gael Varoquaux, Nelle Varoquaux
# License: BSD 3 clause

Expand Down
2 changes: 2 additions & 0 deletions examples/cluster/plot_agglomerative_clustering_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,9 @@
distance, the separation is good and the waveform classes are recovered.
Finally, the cosine distance does not separate at all waveform 1 and 2,
thus the clustering puts them in the same cluster.

"""

# Author: Gael Varoquaux
# License: BSD 3-Clause or CC-0

Expand Down
1 change: 1 addition & 0 deletions examples/cluster/plot_agglomerative_dendrogram.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
=========================================
This example plots the corresponding dendrogram of a hierarchical clustering
using AgglomerativeClustering and the dendrogram method available in scipy.

"""

import numpy as np
Expand Down
3 changes: 1 addition & 2 deletions examples/cluster/plot_birch_vs_minibatchkmeans.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,13 @@
samples to a set of 158 clusters. This can be viewed as a preprocessing
step before the final (global) clustering step that further reduces these
158 clusters to 100 clusters.

"""

# Authors: Manoj Kumar <manojkumarsivaraj334@gmail.com
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD 3 clause

print(__doc__)

from itertools import cycle
from time import time
import numpy as np
Expand Down
2 changes: 1 addition & 1 deletion examples/cluster/plot_cluster_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
While these examples give some intuition about the
algorithms, this intuition might not apply to very high
dimensional data.

"""
print(__doc__)

import time
import warnings
Expand Down
2 changes: 0 additions & 2 deletions examples/cluster/plot_cluster_iris.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@
and finally the ground truth.

"""
print(__doc__)


# Code source: Gaël Varoquaux
# Modified for documentation by Jaques Grobler
Expand Down
2 changes: 1 addition & 1 deletion examples/cluster/plot_coin_segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@
using a kmeans algorithm
* whereas 'discrete' will iteratively search for the closest partition
space to the embedding space.

"""
print(__doc__)

# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>, Brian Cheung
# License: BSD 3 clause
Expand Down
3 changes: 1 addition & 2 deletions examples/cluster/plot_coin_ward_segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,13 @@
Compute the segmentation of a 2D image with Ward hierarchical
clustering. The clustering is spatially constrained in order
for each segmented region to be in one piece.

"""

# Author : Vincent Michel, 2010
# Alexandre Gramfort, 2011
# License: BSD 3 clause

print(__doc__)

import time as time

import numpy as np
Expand Down
3 changes: 2 additions & 1 deletion examples/cluster/plot_color_quantization.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,15 @@

For comparison, a quantized image using a random codebook (colors picked up
randomly) is also shown.

"""

# Authors: Robert Layton <robertlayton@gmail.com>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
#
# License: BSD 3 clause

print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
Expand Down
1 change: 0 additions & 1 deletion examples/cluster/plot_dbscan.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
Finds core samples of high density and expands clusters from them.

"""
print(__doc__)

import numpy as np

Expand Down
2 changes: 1 addition & 1 deletion examples/cluster/plot_dict_face_patches.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@
partial-fit. This is because the number of patches that they represent
has become too low, and it is better to choose a random new
cluster.

"""
print(__doc__)

import time

Expand Down
Loading