SparkJavaAPI:join的使用

将一组数据转化为RDD后,分别创造出两个PairRDD,然后再对两个PairRDD进行join连接操作(即将相同Key对应的两个Value组合成一个元组,而不是归约合并),过程如下图所示:

SparkJavaAPI:join的使用

代码实现如下:

 

 
  1. public class SparkRDDDemo {

  2. public static void main(String[] args){

  3. SparkConf conf = new SparkConf().setAppName("SparkRDD").setMaster("local");

  4. JavaSparkContext sc = new JavaSparkContext(conf);

  5.  
  6. List<Integer> data = Arrays.asList(1,2,3,4,5);

  7. JavaRDD<Integer> rdd = sc.parallelize(data);

  8.  
  9. //FirstRDD

  10. JavaPairRDD<Integer, Integer> firstRDD = rdd.mapToPair(new PairFunction<Integer, Integer, Integer>() {

  11. @Override

  12. public Tuple2<Integer, Integer> call(Integer num) throws Exception {

  13. return new Tuple2<>(num, num * num);

  14. }

  15. });

  16.  
  17. //SecondRDD

  18. JavaPairRDD<Integer, String> secondRDD = rdd.mapToPair(new PairFunction<Integer, Integer, String>() {

  19. @Override

  20. public Tuple2<Integer, String> call(Integer num) throws Exception {

  21. return new Tuple2<>(num, String.valueOf((char)(64 + num * num)));

  22. }

  23. });

  24.  
  25. JavaPairRDD<Integer, Tuple2<Integer, String>> joinRDD = firstRDD.join(secondRDD);

  26.  
  27. JavaRDD<String> res = joinRDD.map(new Function<Tuple2<Integer, Tuple2<Integer, String>>, String>() {

  28. @Override

  29. public String call(Tuple2<Integer, Tuple2<Integer, String>> integerTuple2Tuple2) throws Exception {

  30. int key = integerTuple2Tuple2._1();

  31. int value1 = integerTuple2Tuple2._2()._1();

  32. String value2 = integerTuple2Tuple2._2()._2();

  33. return "<" + key + ",<" + value1 + "," + value2 + ">>";

  34. }

  35. });

  36.  
  37. List<String> resList = res.collect();

  38. for(String str : resList)

  39. System.out.println(str);

  40.  
  41. sc.stop();

  42. }

  43. }