Skip to content

Commit ed74d30

Browse files
noel-smith authored and mengxr committed
[DOC] [PYSPARK] [MLLIB] Added newlines to docstrings to fix parameter formatting (1.5 backport)
Backport of apache#8851 for 1.5 branch. Author: noelsmith <mail@noelsmith.com> Closes apache#8855 from noel-smith/docstring-missing-newline-fix-1-5-backport.
1 parent 7ab4d17 commit ed74d30

File tree

8 files changed

+13
-1
lines changed

8 files changed

+13
-1
lines changed

python/pyspark/ml/param/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -164,6 +164,7 @@ def extractParamMap(self, extra=None):
164164
a flat param map, where the latter value is used if there exist
165165
conflicts, i.e., with ordering: default param values <
166166
user-supplied values < extra.
167+
167168
:param extra: extra param values
168169
:return: merged param map
169170
"""
@@ -182,6 +183,7 @@ def copy(self, extra=None):
182183
embedded and extra parameters over and returns the copy.
183184
Subclasses should override this method if the default approach
184185
is not sufficient.
186+
185187
:param extra: Extra parameters to copy to the new instance
186188
:return: Copy of this instance
187189
"""
@@ -201,6 +203,7 @@ def _shouldOwn(self, param):
201203
def _resolveParam(self, param):
202204
"""
203205
Resolves a param and validates the ownership.
206+
204207
:param param: param name or the param instance, which must
205208
belong to this Params instance
206209
:return: resolved param instance
@@ -243,6 +246,7 @@ def _copyValues(self, to, extra=None):
243246
"""
244247
Copies param values from this instance to another instance for
245248
params shared by them.
249+
246250
:param to: the target instance
247251
:param extra: extra params to be copied
248252
:return: the target instance with param values copied

python/pyspark/ml/pipeline.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,7 @@ def __init__(self, stages=None):
154154
def setStages(self, value):
155155
"""
156156
Set pipeline stages.
157+
157158
:param value: a list of transformers or estimators
158159
:return: the pipeline instance
159160
"""

python/pyspark/ml/tuning.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -262,6 +262,7 @@ def copy(self, extra=None):
262262
and some extra params. This copies the underlying bestModel,
263263
creates a deep copy of the embedded paramMap, and
264264
copies the embedded and extra parameters over.
265+
265266
:param extra: Extra parameters to copy to the new instance
266267
:return: Copy of this instance
267268
"""

python/pyspark/ml/wrapper.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -119,6 +119,7 @@ def _create_model(self, java_model):
119119
def _fit_java(self, dataset):
120120
"""
121121
Fits a Java model to the input dataset.
122+
122123
:param dataset: input dataset, which is an instance of
123124
:py:class:`pyspark.sql.DataFrame`
124125
:param params: additional params (overwriting embedded values)
@@ -172,6 +173,7 @@ def copy(self, extra=None):
172173
extra params. This implementation first calls Params.copy and
173174
then make a copy of the companion Java model with extra params.
174175
So both the Python wrapper and the Java model get copied.
176+
175177
:param extra: Extra parameters to copy to the new instance
176178
:return: Copy of this instance
177179
"""

python/pyspark/mllib/evaluation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ class MulticlassMetrics(JavaModelWrapper):
147147
"""
148148
Evaluator for multiclass classification.
149149
150-
:param predictionAndLabels an RDD of (prediction, label) pairs.
150+
:param predictionAndLabels: an RDD of (prediction, label) pairs.
151151
152152
>>> predictionAndLabels = sc.parallelize([(0.0, 0.0), (0.0, 1.0), (0.0, 0.0),
153153
... (1.0, 0.0), (1.0, 1.0), (1.0, 1.0), (1.0, 1.0), (2.0, 2.0), (2.0, 0.0)])

python/pyspark/mllib/linalg/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,7 @@ class Vector(object):
232232
def toArray(self):
233233
"""
234234
Convert the vector into an numpy.ndarray
235+
235236
:return: numpy.ndarray
236237
"""
237238
raise NotImplementedError

python/pyspark/streaming/context.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -240,6 +240,7 @@ def start(self):
240240
def awaitTermination(self, timeout=None):
241241
"""
242242
Wait for the execution to stop.
243+
243244
@param timeout: time to wait in seconds
244245
"""
245246
if timeout is None:
@@ -252,6 +253,7 @@ def awaitTerminationOrTimeout(self, timeout):
252253
Wait for the execution to stop. Return `true` if it's stopped; or
253254
throw the reported error during the execution; or `false` if the
254255
waiting time elapsed before returning from the method.
256+
255257
@param timeout: time to wait in seconds
256258
"""
257259
self._jssc.awaitTerminationOrTimeout(int(timeout * 1000))

python/pyspark/streaming/mqtt.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ def createStream(ssc, brokerUrl, topic,
3131
storageLevel=StorageLevel.MEMORY_AND_DISK_SER_2):
3232
"""
3333
Create an input stream that pulls messages from a Mqtt Broker.
34+
3435
:param ssc: StreamingContext object
3536
:param brokerUrl: Url of remote mqtt publisher
3637
:param topic: topic name to subscribe to

0 commit comments

Comments (0)