import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by liupeng on 2017/6/16.
*/
object T_union {
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("union_test").setMaster("local")
    val sc = new SparkContext(conf)
    val rdd = sc.parallelize(1 to 5, 1)
    val rdd1 = sc.parallelize(4 to 7, 1)
    // union merges two RDDs without deduplicating; it simply concatenates them
    val unioned = rdd.union(rdd1)
    unioned.foreach(println)
  }
}
Output:
1
2
3
4
5
4
5
6
7
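As the output shows, union keeps duplicates: 4 and 5 appear twice because they exist in both RDDs. When set semantics are wanted, a follow-up distinct() removes them. Below is a minimal sketch, reusing the same sc from the example above; it also uses the ++ operator, which the RDD API defines as an alias for union, and notes that the unioned RDD's partition count is the sum of the inputs' partition counts:

// Assumes the SparkContext `sc` and the same two ranges as the example above.
val a = sc.parallelize(1 to 5, 1)
val b = sc.parallelize(4 to 7, 1)

val merged = a ++ b                // ++ is an alias for union
println(merged.getNumPartitions)   // 2: union sums the partition counts (1 + 1)

// distinct() shuffles to remove the duplicated 4 and 5
println(merged.distinct().collect().toList.sorted)
// List(1, 2, 3, 4, 5, 6, 7)

Note that distinct() triggers a shuffle, so it is considerably more expensive than the union itself, which only concatenates partitions without moving data.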