core/src/main/scala/spark: 1 file changed (+7, -1)

@@ -253,7 +253,13 @@ class SparkContext(
   def accumulable[T, R](initialValue: T)(implicit param: AccumulableParam[T, R]) =
     new Accumulable(initialValue, param)

-  def accumlableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable, T](initialValue: R) = {
+  /**
+   * Create an accumulator from a "mutable collection" type.
+   *
+   * Growable and TraversableOnce are the standard APIs that guarantee += and ++=, implemented by
+   * standard mutable collections. So you can use this with mutable Map, Set, etc.
+   */
+  def accumulableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable, T](initialValue: R) = {
     val param = new GrowableAccumulableParam[R, T]
     new Accumulable(initialValue, param)
   }
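
For context, a short usage sketch of the renamed method. This is a hypothetical example, not part of the commit: it assumes an existing SparkContext named sc and a small parallelized dataset, and it relies only on the fact that any Serializable mutable collection mixing in Growable and TraversableOnce (such as mutable.HashSet) satisfies the view bound.

import scala.collection.mutable

// Hypothetical usage sketch: `sc` is assumed to be an existing SparkContext.
// accumulableCollection accepts any Serializable mutable collection that
// mixes in Growable and TraversableOnce, e.g. mutable.HashSet or ArrayBuffer.
val seenWords = sc.accumulableCollection(mutable.HashSet[String]())

sc.parallelize(Seq("a", "b", "a")).foreach { word =>
  seenWords += word   // += is available because HashSet is Growable
}

// Only the driver reads the merged value.
println(seenWords.value)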