Big Data Learning with Hadoop 11: MapReduce Exercises 02

Author: Jiang锋时刻 | Published 2020-05-30 02:13

    Feel free to follow my CSDN blog: https://blog.csdn.net/bingque6535

    1. Problem

    Find the common friends of every pair of people in the dataset.

    2. Dataset
    A:B,C,D,F,E,O
    B:A,C,E,K
    C:F,A,D,I
    D:A,E,F,L
    E:B,C,D,M,L
    F:A,B,C,D,E,O,M
    G:A,C,D,E,F
    H:A,C,D,E,O
    I:A,O
    J:B,O
    K:A,C,D
    L:D,E,F
    M:E,F,G
    O:A,H,I,J,K
    

    Note:
    The line A:B,C,D,F,E,O means that B, C, D, F, E, and O are A's friends. For example, the common friends of A and B are C and E.

    3. Approach
    1. First, for each person, work out whose friend lists they appear in, i.e. who "knows" them.
    2. Pair up those people two by two: since they all list the same person as a friend, every such pair has at least that person as a common friend.
    3. Collecting these pairs across all people yields every pair that has common friends, as the worked example below shows.
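
    For example, stage 1 inverts the friend lists and collects, for A, everyone whose friend list contains A (the value order depends on the shuffle):

      A	B,C,D,F,G,H,I,K,O

    Stage 2 then emits one record per pair drawn from that list (B-C: A, B-D: A, ...) and merges the values per pair in the reducer, so the pair A-B ends up with C,E, the two names that appear in both A's and B's friend lists.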

    4. Code

    1. Driver

      package com.hjf.mr.friend;
      
      import org.apache.hadoop.conf.Configuration;
      import org.apache.hadoop.fs.FileSystem;
      import org.apache.hadoop.fs.Path;
      import org.apache.hadoop.io.Text;
      import org.apache.hadoop.mapreduce.Job;
      import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
      import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
      
      import java.io.IOException;
      
      /**
       * @author Jiang锋时刻
       * @create 2020-05-20 0:01
       * Chains the two MapReduce stages that compute the common friends
       * of every pair of people in the dataset.
       */
      public class FriendsDriver {
          public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
              Configuration conf = new Configuration();
              // --------------------- Stage 1 ---------------------
              Job job1 = Job.getInstance(conf);
              job1.setJarByClass(FriendsDriver.class);
              // Set the stage-1 Mapper and Reducer classes
              job1.setMapperClass(FriendsMapper1.class);
              job1.setReducerClass(FriendsReducer1.class);
      
              job1.setMapOutputKeyClass(Text.class);
              job1.setMapOutputValueClass(Text.class);
      
              job1.setOutputKeyClass(Text.class);
              job1.setOutputValueClass(Text.class);
      
              // Local input file and stage-1 output directory
              Path inputPath = new Path("./Data/friends.txt");
              Path outputPath = new Path("./Data/output1");
      
              FileSystem fs = FileSystem.get(conf);
              // Delete the output directory if it exists: Hadoop refuses to overwrite it
              if (fs.exists(outputPath)) {
                  fs.delete(outputPath, true);
              }
      
              FileInputFormat.setInputPaths(job1, inputPath);
              FileOutputFormat.setOutputPath(job1, outputPath);
      
              // Abort if stage 1 fails: stage 2 depends on its output
              if (!job1.waitForCompletion(true)) {
                  System.exit(1);
              }
      
              // --------------------- Stage 2 ---------------------
              Job job2 = Job.getInstance(conf);
              job2.setJarByClass(FriendsDriver.class);
              // Set the stage-2 Mapper and Reducer classes
              job2.setMapperClass(FriendsMapper2.class);
              job2.setReducerClass(FriendsReducer2.class);
      
              job2.setMapOutputKeyClass(Text.class);
              job2.setMapOutputValueClass(Text.class);
      
              job2.setOutputKeyClass(Text.class);
              job2.setOutputValueClass(Text.class);
              // Stage 1's output directory is stage 2's input
              Path inputPath2 = new Path("./Data/output1");
              Path outputPath2 = new Path("./Data/output2");
      
              if (fs.exists(outputPath2)) {
                  fs.delete(outputPath2, true);
              }
      
              FileInputFormat.setInputPaths(job2, inputPath2);
              FileOutputFormat.setOutputPath(job2, outputPath2);
      
              // Exit with a nonzero status if stage 2 fails
              System.exit(job2.waitForCompletion(true) ? 0 : 1);
          }
      }
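
    Running the driver (e.g. in local mode from the IDE) leaves the final result under ./Data/output2. A few sample lines; the names inside a value come out in whatever order the HashSet yields them, so they may differ from run to run:

      A-B:	C,E
      A-C:	D,F
      B-C:	A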
      
      
    2. Mapper 1

      package com.hjf.mr.friend;
      
      import org.apache.hadoop.io.LongWritable;
      import org.apache.hadoop.io.Text;
      import org.apache.hadoop.mapreduce.Mapper;
      
      import java.io.IOException;
      
      /**
       * @author Jiang锋时刻
       * @create 2020-05-20 0:03
       * Inverts each line from "person: that person's friends" into one
       * (friend, person) record per friend: everyone who lists the same
       * person as a friend shares that person as a common friend.
       */
      public class FriendsMapper1 extends Mapper<LongWritable, Text, Text, Text> {
          @Override
          protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
              // Input line format: name:friend1,friend2,...
              String[] split = value.toString().split(":");
              Text name = new Text(split[0]);
              String[] friends = split[1].split(",");
              // Emit (friend, name): name appears in friend's "known by" list
              for (String friend: friends) {
                  context.write(new Text(friend), name);
              }
      
          }
      }
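
    For the first input line A:B,C,D,F,E,O, this mapper emits six records: (B, A), (C, A), (D, A), (F, A), (E, A), (O, A).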
      
    3. Reducer 1

      package com.hjf.mr.friend;
      
      import org.apache.hadoop.io.Text;
      import org.apache.hadoop.mapreduce.Reducer;
      
      import java.io.IOException;
      
      /**
       * @author Jiang锋时刻
       * @create 2020-05-20 0:04
       * Joins everyone who lists the current person as a friend
       * into one comma-separated string.
       */
      public class FriendsReducer1 extends Reducer<Text, Text, Text, Text> {
          @Override
          protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
              StringBuilder sb = new StringBuilder();
              for (Text value: values){
                  sb.append(value.toString()).append(",");
              }
              // Drop the trailing comma
              sb.deleteCharAt(sb.length() - 1);
              // Output: person \t comma-separated people who list them as a friend
              context.write(key, new Text(sb.toString()));
          }
      }
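
    For the key A, stage 1 produces a line like the following (the value order depends on the shuffle):

      A	B,C,D,F,G,H,I,K,O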
      
      
    4. Mapper 2

      package com.hjf.mr.friend;
      
      import org.apache.hadoop.io.LongWritable;
      import org.apache.hadoop.io.Text;
      import org.apache.hadoop.mapreduce.Mapper;
      
      import java.io.IOException;
      import java.util.Arrays;
      
      /**
       * @author Jiang锋时刻
       * @create 2020-05-20 1:03
       * Pairs up, two at a time, all people who list the same person as a
       * friend; each such pair shares that person as a common friend.
       */
      public class FriendsMapper2 extends Mapper<LongWritable, Text, Text, Text> {
          @Override
          protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
              // Stage-1 output format: name \t person1,person2,...
              String[] split = value.toString().split("\t");
              Text name = new Text(split[0]);
              String[] friends = split[1].split(",");
              // Sort the names so that A-B and B-A collapse into the same key
              Arrays.sort(friends);
      
              // Every pair drawn from this list shares `name` as a common friend
              for (int i = 0; i < friends.length - 1; i++) {
                  for (int j = i + 1; j < friends.length; j++) {
                      Text relation = new Text(friends[i] + "-" + friends[j] + ":");
                      context.write(relation, name);
                  }
              }
      
          }
      }
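
    From the stage-1 line for A shown above, this mapper emits one record for each of the 9 × 8 / 2 = 36 pairs: (B-C:, A), (B-D:, A), ..., (K-O:, A).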
      
      
    5. Reducer 2

      package com.hjf.mr.friend;
      
      import org.apache.hadoop.io.Text;
      import org.apache.hadoop.mapreduce.Reducer;
      
      import java.io.IOException;
      import java.util.HashSet;
      
      /**
       * @author Jiang锋时刻
       * @create 2020-05-20 1:03
       * Merges all common friends of a pair into one comma-separated string.
       */
      public class FriendsReducer2 extends Reducer<Text, Text, Text, Text> {
          @Override
          protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
              StringBuilder sb = new StringBuilder();
              // A set deduplicates names; HashSet.add already ignores repeats
              HashSet<String> sets = new HashSet<>();
      
              for (Text value: values) {
                  sets.add(value.toString());
              }
      
              for (String set: sets) {
                  sb.append(set).append(",");
              }
              // Drop the trailing comma
              sb.deleteCharAt(sb.length() - 1);
      
              context.write(key, new Text(sb.toString()));
          }
      }
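
    As a sanity check, the sketch below (a hypothetical FriendsCheck class, not part of the original post) computes the same result without Hadoop by intersecting the friend sets of every pair directly; its output should match ./Data/output2 up to the ordering of names within each value.

      package com.hjf.mr.friend;
      
      import java.util.ArrayList;
      import java.util.Arrays;
      import java.util.List;
      import java.util.Map;
      import java.util.Set;
      import java.util.TreeMap;
      import java.util.TreeSet;
      
      /** Hypothetical cross-check: common friends via plain set intersection. */
      public class FriendsCheck {
          public static void main(String[] args) {
              String[] lines = {
                  "A:B,C,D,F,E,O", "B:A,C,E,K", "C:F,A,D,I", "D:A,E,F,L",
                  "E:B,C,D,M,L", "F:A,B,C,D,E,O,M", "G:A,C,D,E,F", "H:A,C,D,E,O",
                  "I:A,O", "J:B,O", "K:A,C,D", "L:D,E,F", "M:E,F,G", "O:A,H,I,J,K"
              };
              // person -> that person's friend set
              Map<String, Set<String>> friendsOf = new TreeMap<>();
              for (String line : lines) {
                  String[] parts = line.split(":");
                  friendsOf.put(parts[0], new TreeSet<>(Arrays.asList(parts[1].split(","))));
              }
              // For every pair of people, print the non-empty friend-set intersections
              List<String> people = new ArrayList<>(friendsOf.keySet());
              for (int i = 0; i < people.size() - 1; i++) {
                  for (int j = i + 1; j < people.size(); j++) {
                      Set<String> common = new TreeSet<>(friendsOf.get(people.get(i)));
                      common.retainAll(friendsOf.get(people.get(j)));
                      if (!common.isEmpty()) {
                          System.out.println(people.get(i) + "-" + people.get(j)
                                  + ":\t" + String.join(",", common));
                      }
                  }
              }
          }
      }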
      
      

    Feel free to follow my CSDN blog: https://blog.csdn.net/bingque6535
