版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/weixin_40042143/article/details/85474293
accumulable:创建一个 [[org.apache.spark.Accumulable]] 共享变量,任务可以通过 `+=` 向其中累加值,但只有 driver 端可以读取它的 `value`。
/**
 * Creates an [[org.apache.spark.Accumulable]] shared variable. Tasks running on the cluster
 * may accumulate into it via `+=`; reading the `value` is restricted to the driver.
 * @tparam R accumulator result type
 * @tparam T type that can be added to the accumulator
 */
@deprecated("use AccumulatorV2", "2.0.0")
def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T])
: Accumulable[R, T] = {
  val accumulable = new Accumulable(initialValue, param)
  // Register the underlying V2 accumulator with the context cleaner (when one is
  // configured) so it can be cleaned up once it is no longer reachable on the driver.
  cleaner.foreach(_.registerAccumulatorForCleanup(accumulable.newAcc))
  accumulable
}
/**
 * Creates a named [[org.apache.spark.Accumulable]] shared variable; the name is shown in
 * the Spark UI. Tasks accumulate into it with the `+=` operator, while reading the
 * `value` is restricted to the driver.
 * @tparam R accumulator result type
 * @tparam T type that can be added to the accumulator
 */
@deprecated("use AccumulatorV2", "2.0.0")
def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T])
: Accumulable[R, T] = {
  // Option(name) maps a null name to None rather than Some(null).
  val namedAccumulable = new Accumulable(initialValue, param, Option(name))
  // Register with the context cleaner (if configured) so the accumulator is
  // released once it goes out of scope on the driver.
  cleaner.foreach(_.registerAccumulatorForCleanup(namedAccumulable.newAcc))
  namedAccumulable
}
/**
 * Creates an accumulator backed by a "mutable collection" type.
 *
 * Any standard mutable collection implementing both Growable and TraversableOnce
 * (mutable Map, Set, Buffer, etc.) provides the required `+=` and `++=` operations,
 * so all of those work here.
 */
@deprecated("use AccumulatorV2", "2.0.0")
def accumulableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable: ClassTag, T]
(initialValue: R): Accumulable[R, T] = {
  // TODO the context bound (<%) above should be replaced with simple type bound and implicit
  // conversion but is a breaking change. This should be fixed in Spark 3.x.
  val growableParam = new GrowableAccumulableParam[R, T]
  val collectionAcc = new Accumulable(initialValue, growableParam)
  // Register with the context cleaner (if configured) so the accumulator can be
  // cleaned up once it is no longer referenced on the driver.
  cleaner.foreach(_.registerAccumulatorForCleanup(collectionAcc.newAcc))
  collectionAcc
}
未完待续……