A minimal demo of combineByKey on a pair RDD: summing the values for each key.

package com.atguigu

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Trans {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark01_Partition")
    // Build the Spark context
    val sc = new SparkContext(conf)

    val rdd: RDD[(String, Int)] = sc.makeRDD(Array(("a", 1), ("b", 2), ("a", 3), ("b", 4), ("a", 5)))

    // combineByKey takes three functions:
    //   1. createCombiner: turns the first value seen for a key into the initial accumulator
    //   2. mergeValue: merges another value for the same key into the accumulator (within a partition)
    //   3. mergeCombiners: merges accumulators for the same key across partitions
    val rdd1: RDD[(String, Int)] = rdd.combineByKey(
      (num: Int) => num,          // first value becomes the initial accumulator
      (x: Int, y: Int) => x + y,  // add each subsequent value within a partition
      (x: Int, y: Int) => x + y   // combine partial sums across partitions
    )

    rdd1.collect().foreach(println)

    sc.stop()
  }
}
Output:

(a,9)
(b,6)
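A plain sum like this could also be written with reduceByKey; combineByKey pays off when the accumulator type differs from the value type. As a sketch building on the same rdd (the (sum, count) accumulator and the names avg are illustrative, not from the original post), here is how a per-key average could look:

// Sketch: per-key average with combineByKey, where the accumulator
// type (sum, count) differs from the value type Int.
val avg: RDD[(String, Double)] = rdd.combineByKey(
  (num: Int) => (num, 1),                                      // first value: (sum = num, count = 1)
  (acc: (Int, Int), num: Int) => (acc._1 + num, acc._2 + 1),   // fold a value into (sum, count) within a partition
  (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2) // merge partial (sum, count) pairs across partitions
).mapValues { case (sum, count) => sum.toDouble / count }

avg.collect().foreach(println)
// For the sample data above this prints (a,3.0) and (b,3.0).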