package kerberostest1;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
/**
 * @ClassName WordCountDemo
 * @Desc WordCount demo with Kerberos authentication
 * @Author lijk
 * @Date 2019/6/13 19:13
 * @Version 1.0
 */
public class WordCountDemo {

    public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final Text outputKey = new Text();
        private final IntWritable outputValue = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split each line on whitespace and emit (word, 1) for every token.
            String[] splits = value.toString().split("\\s+");
            for (String split : splits) {
                outputKey.set(split);
                context.write(outputKey, outputValue);
            }
        }
    }
    public static class WordCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable outputValue = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Sum the counts emitted for this word by the mappers.
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            outputValue.set(sum);
            context.write(key, outputValue);
        }
    }
    /**
     * @Author lijiankang
     * @Date 2019/6/24 14:54
     * @Param args[0] principal, args[1] keytab, args[2] inputPath, args[3] outputPath
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration configuration = new Configuration();
        // These values could also be read from args as documented above;
        // they are hardcoded here for the demo.
        String inputPath = "/tmp/test/input";
        String outputPath = "/tmp/test/output";
        String principal = "test";
        String keytab = "E:\\project\\ljkcn\\src\\main\\resources\\test.keytab";
        String krb5Path = "E:\\project\\ljkcn\\src\\main\\resources\\krb5.conf";

        // Perform Kerberos authentication: point the JVM at krb5.conf,
        // then log in with the submitter's keytab.
        System.setProperty("java.security.krb5.conf", krb5Path);
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);

        Job job = Job.getInstance(configuration);
        Configuration conf = job.getConfiguration();
        // Ship the packaged jar to the cluster; setJar overrides the
        // mapreduce.job.jar property set on the line above.
        conf.set("mapreduce.job.jar", "ljkcn.jar");
        job.setJarByClass(WordCountDemo.class);
        job.setJar("E:\\project\\ljkcn\\out\\artifacts\\ljkcn_jar\\ljkcn.jar");
        job.setJobName("WordCountDemo");
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        Path inPath = new Path(inputPath);
        Path outPath = new Path(outputPath);
        // Delete any previous output so the job does not fail on an existing directory.
        outPath.getFileSystem(conf).delete(outPath, true);
        FileInputFormat.addInputPath(job, inPath);
        FileOutputFormat.setOutputPath(job, outPath);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
Submitting YARN jobs from Windows to a Kerberos cluster

Configuration items
1. The etc/hosts file on Windows must list the IP address and hostname of every cluster node.
2. core-site.xml, hdfs-site.xml, mapred-site.xml, and yarn-site.xml must be loaded into the Configuration (see the sketch after this list).
3. mapred-site.xml needs the additional property mapreduce.app-submission.cross-platform set to true.
4. The dependencies hadoop-mapreduce-client-jobclient and hadoop-mapreduce-client-common must be added.
5. The warning "Client cannot authenticate via:[TOKEN, KERBEROS]" can be ignored; it is a Hadoop bug fixed in later versions.
6. ${variable} placeholders in the xml files must be replaced with concrete values.
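A minimal sketch of items 2 and 3, assuming the four site files have been copied from the cluster into a local directory; the class name, method name, and confDir path are hypothetical, and the cross-platform flag can equally be set in code rather than by editing mapred-site.xml:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ClientConf {
    // Build a client-side Configuration preloaded with the cluster's site
    // files (item 2) and the cross-platform flag (item 3). confDir is a
    // hypothetical local directory holding copies of the cluster's files.
    public static Configuration load(String confDir) {
        Configuration conf = new Configuration();
        conf.addResource(new Path(confDir, "core-site.xml"));
        conf.addResource(new Path(confDir, "hdfs-site.xml"));
        conf.addResource(new Path(confDir, "mapred-site.xml"));
        conf.addResource(new Path(confDir, "yarn-site.xml"));
        // Needed when the client OS (Windows) differs from the cluster OS;
        // equivalent to adding the property to mapred-site.xml.
        conf.setBoolean("mapreduce.app-submission.cross-platform", true);
        return conf;
    }
}

The dependencies from item 4 must also be on the client classpath, or submitting the Job to YARN will fail at runtime.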
Code-side authentication
1. Only the keytab file of the user submitting the YARN job is needed for authentication (see the condensed sketch after this list).
2. The rest of the code is the same as in non-secure mode.
3. The job must be packaged into a jar, which job.setJar() points to as in the listing above.
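The authentication itself reduces to the three calls already used in main(); a condensed sketch, with a hypothetical principal and file paths:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosLogin {
    // Condensed version of the authentication steps from main() above.
    // The principal and paths are hypothetical examples.
    public static void login() throws IOException {
        System.setProperty("java.security.krb5.conf", "E:\\conf\\krb5.conf");
        Configuration conf = new Configuration(); // should include the cluster's site files
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("test", "E:\\conf\\test.keytab");
    }
}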