Similar to collect, but collectAsMap applies only to key-value RDDs and returns the result to the driver as a Map, preserving the key-value structure. Note that a Map holds a single value per key, so if the RDD contains duplicate keys, only one value is kept for each key.
A Java example follows:
package com.cb.spark.sparkrdd;

import java.util.Arrays;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class CollectAsMapExample {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("CollectAsMap").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        JavaRDD<Integer> javaRDD = jsc.parallelize(Arrays.asList(1, 2, 1, 3), 1);
        // Zip the RDD with itself to build a pair RDD: (1,1), (2,2), (1,1), (3,3).
        JavaPairRDD<Integer, Integer> javaPairRDD = javaRDD.zip(javaRDD);
        // Collect to the driver as a Map; duplicate keys keep only one value.
        Map<Integer, Integer> map = javaPairRDD.collectAsMap();
        System.out.println(map);
        jsc.stop();
    }
}
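Here zip pairs the RDD with itself, producing (1,1), (2,2), (1,1), (3,3); the duplicate pair (1,1) collapses, so the program prints {1=1, 2=2, 3=3} (map iteration order is not guaranteed).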
A Scala example follows:
package com.cb.spark.rdd

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object CollectAsMap {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("CollectAsMap")
    val sc = new SparkContext(conf)
    val a = sc.parallelize(List(1, 2, 3, 1, 2), 1)
    val b = sc.parallelize(List("a", "b", "c", "b", "d"), 1)
    // Zip the two RDDs element-wise: (1,a), (2,b), (3,c), (1,b), (2,d).
    val c = a.zip(b)
    c.foreach(println)
    println()
    // Keys 1 and 2 each appear twice; collectAsMap keeps only one value per key.
    c.collectAsMap().foreach(println)
    sc.stop()
  }
}
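The zipped pairs are (1,a), (2,b), (3,c), (1,b), (2,d). Because keys 1 and 2 each occur twice, collectAsMap retains a single value per key; in practice the later pair overwrites the earlier one, so the collected map contains 1 -> b, 2 -> d, and 3 -> c (iteration order unspecified).
When every value of a duplicated key must be retained, collectAsMap alone will silently drop data. Below is a minimal sketch of one alternative, grouping before collecting; the object name GroupThenCollectAsMap and the sample data are illustrative assumptions, not part of the original example:
package com.cb.spark.rdd

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object GroupThenCollectAsMap {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("GroupThenCollectAsMap")
    val sc = new SparkContext(conf)
    // Illustrative data with duplicate keys 1 and 2.
    val pairs = sc.parallelize(List(1 -> "a", 2 -> "b", 3 -> "c", 1 -> "b", 2 -> "d"), 1)
    // groupByKey keeps every value per key, so nothing is lost when collecting:
    // expected entries are 1 -> (a, b), 2 -> (b, d), 3 -> (c).
    pairs.groupByKey().collectAsMap().foreach(println)
    sc.stop()
  }
}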