1. mapPartitions
mapPartitions works partition by partition: the supplied function is called once per partition, receiving an iterator over that partition's elements and returning an iterator of results, rather than being called once per element as with map.
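The difference shows up in the method signatures. A simplified sketch of the two Scala RDD signatures (ClassTag implicits omitted here):

def map[U](f: T => U): RDD[U]
// f consumes the whole partition as an iterator and produces an iterator of results
def mapPartitions[U](f: Iterator[T] => Iterator[U],
                     preservesPartitioning: Boolean = false): RDD[U]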
When to use it
If the mapping has to create expensive auxiliary objects over and over, mapPartitions is much more efficient than map.
For example, suppose all the data in an RDD is written to a database through JDBC: with map, a connection might be created for every single element, which is very costly; with mapPartitions, only one connection per partition is needed.
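A minimal Scala sketch of that per-partition connection pattern, here using the foreachPartition action on an RDD[Int] (the JDBC URL, credentials, and table name are placeholder assumptions, not part of the original example):

rdd.foreachPartition { it =>
  // One connection per partition instead of one per element
  val conn = java.sql.DriverManager.getConnection(
    "jdbc:mysql://localhost:3306/test", "user", "password") // placeholder URL/credentials
  val stmt = conn.prepareStatement("INSERT INTO records (value) VALUES (?)") // placeholder table
  try {
    it.foreach { v =>
      stmt.setInt(1, v)
      stmt.executeUpdate()
    }
  } finally {
    stmt.close()
    conn.close()
  }
}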
Square every element in the RDD!
Java version
SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("MapPartitionsJava");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Integer> rdd = sc.parallelize(
        Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 1);
JavaRDD<Integer> mapPartitionRDD = rdd.mapPartitions(new FlatMapFunction<Iterator<Integer>, Integer>() {
    @Override
    public Iterator<Integer> call(Iterator<Integer> it) throws Exception {
        // Collect the square of every element in this partition, then return one iterator
        ArrayList<Integer> results = new ArrayList<>();
        while (it.hasNext()) {
            int i = it.next();
            results.add(i * i);
        }
        return results.iterator();
    }
});
mapPartitionRDD.foreach(new VoidFunction<Integer>() {
    @Override
    public void call(Integer integer) throws Exception {
        System.out.println(integer);
    }
});
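Scala version (a minimal sketch of the same squaring example; it leans on the iterator's own map instead of building an intermediate list, and the app name is just a placeholder):

val conf = new SparkConf().setMaster("local[*]").setAppName("MapPartitionsScala")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 1)
// The function receives one partition as an iterator and returns an iterator of squares
val squared = rdd.mapPartitions(it => it.map(i => i * i))
squared.foreach(println)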
Turn each number i in the RDD into a pair of the form (i, i*i)
Java version
SparkConf conf = new SparkConf().setAppName("MapPartitionsJava").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Integer> rdd = sc.parallelize(
        Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 1);
JavaRDD<Tuple2<Integer, Integer>> tuple2JavaRDD = rdd.mapPartitions(new FlatMapFunction<Iterator<Integer>, Tuple2<Integer, Integer>>() {
    @Override
    public Iterator<Tuple2<Integer, Integer>> call(Iterator<Integer> it) throws Exception {
        ArrayList<Tuple2<Integer, Integer>> tuple2s = new ArrayList<>();
        while (it.hasNext()) {
            Integer next = it.next();
            tuple2s.add(new Tuple2<Integer, Integer>(next, next * next));
        }
        return tuple2s.iterator();
    }
});
tuple2JavaRDD.foreach(new VoidFunction<Tuple2<Integer, Integer>>() {
    @Override
    public void call(Tuple2<Integer, Integer> tp2) throws Exception {
        System.out.println(tp2);
    }
});
Scala version
val conf = new SparkConf().setAppName("map").setMaster("local[*]")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List(1,2,3,4,5,6,7,8,9,10),3)
def mapPartFunc(iter: Iterator[Int]):Iterator[(Int,Int)]={
var res = List[(Int,Int)]()
while (iter.hasNext){
val cur = iter.next
res=res.::(cur,cur*cur)
}
res.iterator
}
val mapPartRDD = rdd.mapPartitions(mapPartFunc)
mapPartRDD.foreach(maps=>println(maps))
Using mapPartitions on key-value pairs: turn each (i, j) into (i, j*j)
Scala version
val conf = new SparkConf().setAppName("map").setMaster("local[*]")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List((1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3)))
def mapPartFunc(iter: Iterator[(Int, Int)]): Iterator[(Int, Int)] = {
  var res = List[(Int, Int)]()
  while (iter.hasNext) {
    val cur = iter.next()
    // Keep the key, square the value
    res = (cur._1, cur._2 * cur._2) :: res
  }
  res.iterator
}
val mapPartitionsRDD = rdd.mapPartitions(mapPartFunc)
mapPartitionsRDD.foreach(println)
Java version
SparkConf conf = new SparkConf().setAppName("MapPartitionsJava").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Tuple2<Integer, Integer>> rdd = sc.parallelize(Arrays.asList(
        new Tuple2<Integer, Integer>(1, 1), new Tuple2<Integer, Integer>(1, 2),
        new Tuple2<Integer, Integer>(1, 3), new Tuple2<Integer, Integer>(2, 1),
        new Tuple2<Integer, Integer>(2, 2), new Tuple2<Integer, Integer>(2, 3)), 3);
JavaPairRDD<Integer, Integer> pairRDD = JavaPairRDD.fromJavaRDD(rdd);
JavaRDD<Tuple2<Integer, Integer>> tuple2JavaRDD = pairRDD.mapPartitions(new FlatMapFunction<Iterator<Tuple2<Integer, Integer>>, Tuple2<Integer, Integer>>() {
    @Override
    public Iterator<Tuple2<Integer, Integer>> call(Iterator<Tuple2<Integer, Integer>> tp2It) throws Exception {
        ArrayList<Tuple2<Integer, Integer>> tuple2s = new ArrayList<>();
        while (tp2It.hasNext()) {
            Tuple2<Integer, Integer> next = tp2It.next();
            tuple2s.add(new Tuple2<Integer, Integer>(next._1, next._2 * next._2));
        }
        return tuple2s.iterator();
    }
});
tuple2JavaRDD.foreach(new VoidFunction<Tuple2<Integer, Integer>>() {
    @Override
    public void call(Tuple2<Integer, Integer> tp2) throws Exception {
        System.out.println(tp2);
    }
});
2. mapPartitionsWithIndex
A per-partition map just like mapPartitions, except that the supplied function takes one extra argument: the index of the partition being processed.
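A simplified sketch of the Scala signature (ClassTag implicit omitted):

def mapPartitionsWithIndex[U](
    f: (Int, Iterator[T]) => Iterator[U],  // partition index, then the partition's elements
    preservesPartitioning: Boolean = false): RDD[U]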
(1) List the elements in each partition
Scala version
val conf = new SparkConf().setAppName("mapPartitionsWithIndex").setMaster("local[*]")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 3)
def mapPartIndexFunc(partIndex: Int, iter: Iterator[Int]): Iterator[(Int, Int)] = {
  var res = List[(Int, Int)]()
  while (iter.hasNext) {
    val next = iter.next()
    // Pair each element with the index of the partition it lives in
    res = (partIndex, next) :: res
  }
  res.iterator
}
val mapPartIndexRDDs = rdd.mapPartitionsWithIndex(mapPartIndexFunc)
mapPartIndexRDDs.foreach(println)
Java version
SparkConf conf = new SparkConf().setAppName("mapPartitionsWithIndex").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 3);
JavaRDD<Tuple2<Integer, Integer>> tuple2JavaRDD = rdd.mapPartitionsWithIndex(new Function2<Integer, Iterator<Integer>, Iterator<Tuple2<Integer, Integer>>>() {
    @Override
    public Iterator<Tuple2<Integer, Integer>> call(Integer partIndex, Iterator<Integer> it) throws Exception {
        ArrayList<Tuple2<Integer, Integer>> tuple2s = new ArrayList<>();
        while (it.hasNext()) {
            int next = it.next();
            tuple2s.add(new Tuple2<>(partIndex, next));
        }
        return tuple2s.iterator();
    }
}, false);
tuple2JavaRDD.foreach(new VoidFunction<Tuple2<Integer, Integer>>() {
    @Override
    public void call(Tuple2<Integer, Integer> tp2) throws Exception {
        System.out.println(tp2);
    }
});
(2) mapPartitionsWithIndex on key-value pairs: list the elements in each partition
Scala version
val conf = new SparkConf().setAppName("mapPartitionsWithIndex").setMaster("local[*]")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List((1, 1), (1, 2), (2, 3), (2, 4), (3, 5), (3, 6), (4, 7), (4, 8), (5, 9), (5, 10)), 3)
def mapPartIndexFunc(partIndex: Int, iter: Iterator[(Int, Int)]): Iterator[(Int, (Int, Int))] = {
  var res = List[(Int, (Int, Int))]()
  while (iter.hasNext) {
    val next = iter.next()
    // Pair each key-value tuple with its partition index
    res = (partIndex, next) :: res
  }
  res.iterator
}
val mapPartIndexRDD = rdd.mapPartitionsWithIndex(mapPartIndexFunc)
mapPartIndexRDD.foreach(println)
Java version
SparkConf conf = new SparkConf().setAppName("mapPartitionsWithIndex").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Tuple2<Integer, Integer>> rdd = sc.parallelize(Arrays.asList(
        new Tuple2<Integer, Integer>(1, 1), new Tuple2<Integer, Integer>(1, 2),
        new Tuple2<Integer, Integer>(2, 3), new Tuple2<Integer, Integer>(2, 4),
        new Tuple2<Integer, Integer>(3, 5), new Tuple2<Integer, Integer>(3, 6),
        new Tuple2<Integer, Integer>(4, 7), new Tuple2<Integer, Integer>(4, 8),
        new Tuple2<Integer, Integer>(5, 9), new Tuple2<Integer, Integer>(5, 10)), 3);
JavaPairRDD<Integer, Integer> pairRDD = JavaPairRDD.fromJavaRDD(rdd);
JavaRDD<Tuple2<Integer, Tuple2<Integer, Integer>>> mapPartitionIndexRDD = pairRDD.mapPartitionsWithIndex(new Function2<Integer, Iterator<Tuple2<Integer, Integer>>, Iterator<Tuple2<Integer, Tuple2<Integer, Integer>>>>() {
    @Override
    public Iterator<Tuple2<Integer, Tuple2<Integer, Integer>>> call(Integer partIndex, Iterator<Tuple2<Integer, Integer>> tuple2Iterator) {
        ArrayList<Tuple2<Integer, Tuple2<Integer, Integer>>> tuple2s = new ArrayList<>();
        while (tuple2Iterator.hasNext()) {
            Tuple2<Integer, Integer> next = tuple2Iterator.next();
            tuple2s.add(new Tuple2<Integer, Tuple2<Integer, Integer>>(partIndex, next));
        }
        return tuple2s.iterator();
    }
}, false);
mapPartitionIndexRDD.foreach(new VoidFunction<Tuple2<Integer, Tuple2<Integer, Integer>>>() {
    @Override
    public void call(Tuple2<Integer, Tuple2<Integer, Integer>> integerTuple2Tuple2) throws Exception {
        System.out.println(integerTuple2Tuple2);
    }
});
3. glom
glom gathers the elements of each partition into one collection (an Array in Scala, a List in the Java API), making it easy to print each partition's contents.
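The Scala signature is simply:

def glom(): RDD[Array[T]]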
Java version
SparkConf conf = new SparkConf().setAppName("Glom").setMaster("local[*]");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<Tuple2<Integer, Integer>> rdd1 = sc.parallelize(Arrays.asList(
        new Tuple2<Integer, Integer>(1, 1), new Tuple2<Integer, Integer>(1, 2),
        new Tuple2<Integer, Integer>(2, 3), new Tuple2<Integer, Integer>(2, 4),
        new Tuple2<Integer, Integer>(3, 5), new Tuple2<Integer, Integer>(3, 6),
        new Tuple2<Integer, Integer>(4, 7), new Tuple2<Integer, Integer>(4, 8),
        new Tuple2<Integer, Integer>(5, 9), new Tuple2<Integer, Integer>(5, 10)), 3);
JavaPairRDD<Integer, Integer> pairRDD = JavaPairRDD.fromJavaRDD(rdd1);
JavaRDD<List<Tuple2<Integer, Integer>>> glom = pairRDD.glom();
glom.foreach(new VoidFunction<List<Tuple2<Integer, Integer>>>() {
    @Override
    public void call(List<Tuple2<Integer, Integer>> tuple2s) throws Exception {
        System.out.println(tuple2s);
    }
});
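Scala version (a minimal sketch of the same example; each printed line is one whole partition):

val conf = new SparkConf().setAppName("Glom").setMaster("local[*]")
val sc = new SparkContext(conf)
val rdd = sc.parallelize(List((1, 1), (1, 2), (2, 3), (2, 4), (3, 5), (3, 6), (4, 7), (4, 8), (5, 9), (5, 10)), 3)
// glom turns each partition into an Array, so there is one println per partition
rdd.glom().foreach(arr => println(arr.mkString("[", ", ", "]")))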