美文网首页
CDH为kafka启用kerberos

CDH为kafka启用kerberos

作者: 阿甘骑士 | 来源:发表于2018-06-07 11:40 被阅读0次
前面CDH成功集成了Kerberos和Sentry;Kafka服务使用上会有所不同,下面介绍为Kafka集群启用Kerberos认证及客户端配置使用
实施方案前,假设下面条件满足
  • CDH集成Kerberos和Sentry成功
  • 具备root权限
修改Kafka配置
  • 进入Kafka服务,修改ssl.client.auth配置为none


    修改配置1.png
  • 启用kerberos


    修改配置2.png
  • 修改security.inter.broker.protocol


    修改配置3.png
  • 最后重启kafka服务
  • 完成以上配置Kafka集群已启用Kerberos认证
Kafka客户端 (以下操作基于一个节点做,其他节点类似)
  • 创建jaas.conf文件
#假设在该目录下建
[root@bi-slave1 kafka_client]# pwd
/usr/local/kafka_client

#创建文件
[root@bi-slave1 kafka_client]# vi jaas.conf 
KafkaClient{
 com.sun.security.auth.module.Krb5LoginModule required
 useTicketCache=true;
};
:wq
  • 创建client.properties文件
-rw-r--r-- 1 root root 96 Jun  6 18:34 jaas.conf
[root@bi-slave1 kafka_client]# vi client.properties 
security.protocol=SASL_PLAINTEXT
sasl.kerberos.service.name=kafka
:wq
  • 初始化kerberos账号
[root@bi-slave1 kafka_client]# kinit deng_yb
Password for deng_yb@WONHIGH.COM: 
[root@bi-slave1 kafka_client]# klist
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: deng_yb@WONHIGH.COM

Valid starting     Expires            Service principal
06/07/18 11:19:06  06/08/18 11:19:06  krbtgt/WONHIGH.COM@WONHIGH.COM
        renew until 06/14/18 11:19:06

  • 设置环境变量
#注意这样配置环境变量只针对当前的进程有效
[root@bi-slave1 kafka_client]# export KAFKA_OPTS="-Djava.security.auth.login.config=/usr/local/kafka_client/jaas.conf"
[root@bi-slave1 kafka_client]# echo $KAFKA_OPTS
-Djava.security.auth.login.config=/usr/local/kafka_client/jaas.conf
  • 测试Producer
kafka-console-producer --broker-list 172.17.194.17:9092,172.17.194.18:9092 --topic kerbero --producer.config client.properties

#输入消息
>Hello World!
>Hello Kerberos!
>

启动producer客户端过程和发消息过程没报错即代表producer客户端配置成功

  • 测试Consumer
kafka-console-consumer --topic kerbero --from-beginning --bootstrap-server 172.17.194.17:9092,172.17.194.18:9092 --consumer.config client.properties

#以下为针对 topic kerbero 收到的消息

Hello World!
Hello Kerberos!
>Hello World!
>Hello Kerberos!

启动consumer客户端过程和接收消息过程没报错即代表客户端配置成功

  • 上述为kafka集成kerberos服务端配置和客户端的使用
  • 接下来介绍代码层面的使用
  • 新建maven工程,pom.xml添加如下
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>0.10.2.0</version>
    </dependency>
  • 添加krb5.conf,可直接从kdc所在服务器中拷贝
[libdefaults]
 default_realm = WONHIGH.COM
 dns_lookup_realm = false
 dns_lookup_kdc = false
 ticket_lifetime = 24h
 renew_lifetime = 7d
 forwardable = true

[realms]
 WONHIGH.COM = {
  kdc = bi-master
  admin_server = bi-master
  default_realm = WONHIGH.COM
  kdc = bi-slave1
 }

[domain_realm]
 .bi-master = WONHIGH.COM
 bi-master = WONHIGH.COM
  • 添加keytab文件,身份验证

  • 上述资源目录大概如下


    资源位置.png
  • 工具类,初始化kerberos环境
package deng.yb.kafka_kerberos.utils;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;

/**
 * Fluent builder around {@link Properties} preconfigured for Kerberos-secured
 * (SASL_PLAINTEXT) Kafka clients, plus a helper that materializes a temporary
 * jaas.conf from a keytab/principal pair.
 *
 * NOTE(review): extending Properties while also wrapping a separate internal
 * Properties instance is redundant; the superclass is kept only so the
 * existing public interface stays unchanged.
 */
public class MyProperties extends Properties{
    // Backing store actually handed to the Kafka client constructors.
    private Properties properties;

    // jaas.conf template for keytab-based login; %1$s = keytab path, %2$s = principal.
    private static final String JAAS_TEMPLATE =
            "KafkaClient {\n"
            + "com.sun.security.auth.module.Krb5LoginModule required\n" +
              "useKeyTab=true\n" +
              "keyTab=\"%1$s\"\n" +
              "principal=\"%2$s\";\n"
            + "};";


    public MyProperties(){
        properties = new Properties();
    }

    /** Returns this instance; enables fluent chaining from {@link #put}. */
    public MyProperties self(){
        return this;
    }

    /**
     * Stores a key/value pair in the backing properties.
     *
     * @param key   property name
     * @param value property value
     * @return this instance, for chaining
     */
    public MyProperties put(String key , String value) {
        if (properties == null) {
            properties = new Properties();
        }

        properties.put(key, value);
        return self();
    }

    /**
     * Builds consumer properties (String deserializers, auto-commit) with the
     * SASL_PLAINTEXT / Kerberos settings required by the secured cluster.
     */
    public static MyProperties initKerberos(){
     return new MyProperties()
                .put(ConsumerConfig.GROUP_ID_CONFIG, "DemoConsumer")
                .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                        "org.apache.kafka.common.serialization.StringDeserializer")
                .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                        "org.apache.kafka.common.serialization.StringDeserializer")
                .put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true")
                .put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000")
                .put("security.protocol", "SASL_PLAINTEXT")
                .put("sasl.kerberos.service.name", "kafka");

    }

    /**
     * Builds producer properties (String serializers, acks=all) with the
     * SASL_PLAINTEXT / Kerberos settings required by the secured cluster.
     */
    public static MyProperties initProducer(){
         return new MyProperties()
            .put(ProducerConfig.ACKS_CONFIG, "all")
            .put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
            .put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
            .put("security.protocol", "SASL_PLAINTEXT")
            .put("sasl.kerberos.service.name", "kafka");
    }

    /** @return the backing {@link Properties} to pass to Kafka client constructors */
    public Properties getProperties() {
        return properties;
    }

    /**
     * Writes a temporary jaas.conf for the given keytab/principal and points
     * the {@code java.security.auth.login.config} system property at it.
     *
     * Fix: the original dereferenced {@code jaasConf} in the finally block and
     * after the catch, so any failure in {@code createTempFile} caused a
     * NullPointerException on top of the swallowed IOException. The writer is
     * now managed by try-with-resources and failures are reported to the
     * caller instead of being printed and ignored.
     *
     * @param keyTab    absolute path to the keytab file
     * @param principal Kerberos principal, e.g. {@code user@REALM}
     * @throws IllegalStateException if the temp file cannot be created or written
     */
    public static void configureJAAS(String keyTab, String principal) {
        String content = String.format(JAAS_TEMPLATE, keyTab, principal);

        try {
            File jaasConf = File.createTempFile("jaas", ".conf");
            jaasConf.deleteOnExit();

            try (PrintWriter writer = new PrintWriter(jaasConf)) {
                writer.println(content);
            }

            System.setProperty("java.security.auth.login.config", jaasConf.getAbsolutePath());

        } catch (IOException e) {
            // Fail fast: a missing jaas.conf would only surface later as an
            // opaque SASL authentication error inside the Kafka client.
            throw new IllegalStateException("Unable to create temporary jaas.conf", e);
        }
    }

}

  • 生产者类
package deng.yb.kafka_kerberos;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import deng.yb.kafka_kerberos.utils.MyProperties;

/**
 * Demo producer that sends ten key/value messages to {@link #TOPIC_NAME}
 * over a Kerberos-secured (SASL_PLAINTEXT) connection.
 */
public class Producer {
    // Target topic. NOTE(review): "kafak2hdfs" looks like a typo of
    // "kafka2hdfs" — kept as-is because the consumer uses the same name.
    public static String TOPIC_NAME = "kafak2hdfs";

    public static void main(String[] args) {

        // Point the JVM at the krb5.conf bundled on the classpath.
        System.setProperty("java.security.krb5.conf",
                Thread.currentThread().getContextClassLoader().getResource("krb5.conf").getPath());
        // Generate a temporary jaas.conf from the keytab and register it.
        MyProperties.configureJAAS(Thread.currentThread().getContextClassLoader().getResource("wms_dev.keytab").getPath(), "wms_dev@WONHIGH.COM");
        // Let GSS-API take credentials from the JAAS subject instead of prompting.
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        //System.setProperty("sun.security.krb5.debug","true");

        // Producer properties preconfigured for Kerberos.
        MyProperties props = MyProperties.initProducer();

        // Kafka broker addresses.
        props.put(
                ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "bi-slave1:9092,bi-slave2:9092,bi-slave3:9092");

        // try-with-resources closes the producer even when send() throws
        // (the original leaked the producer on any exception before close()).
        try (org.apache.kafka.clients.producer.Producer<String, String> producer =
                new KafkaProducer<String, String>(props.getProperties())) {

            for (int i = 0; i < 10; i++) {

                String key = "key-" + i;

                String message = "Message-" + i;

                // Fixed raw type: ProducerRecord is now parameterized.
                ProducerRecord<String, String> record = new ProducerRecord<String, String>(
                        TOPIC_NAME, key, message);

                producer.send(record);

                System.out.println(key + "----" + message);

            }
        }

    }
}
  • 消费者
package deng.yb.kafka_kerberos;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;

import deng.yb.kafka_kerberos.utils.MyProperties;

/**
 * Demo consumer that subscribes to {@link #TOPIC_NAME} on a Kerberos-secured
 * (SASL_PLAINTEXT) cluster and prints every received record.
 *
 * NOTE(review): class name "Comsumer" is a typo of "Consumer" — kept because
 * renaming would break existing callers/launch scripts.
 */
public class Comsumer {
    private static String TOPIC_NAME = "kafak2hdfs";

    public static void main(String[] args) {

        // Point the JVM at the krb5.conf bundled on the classpath.
        System.setProperty("java.security.krb5.conf", Thread.currentThread()
                .getContextClassLoader().getResource("krb5.conf").getPath());
        // Generate a temporary jaas.conf from the keytab and register it.
        MyProperties.configureJAAS(Thread.currentThread().getContextClassLoader().getResource("wms_dev.keytab").getPath(), "wms_dev@WONHIGH.COM");

        // Let GSS-API take credentials from the JAAS subject instead of prompting.
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        // Consumer properties preconfigured for Kerberos.
        MyProperties props = MyProperties.initKerberos();

        // Fixed: the original used ProducerConfig.BOOTSTRAP_SERVERS_CONFIG in a
        // consumer; both constants share the value "bootstrap.servers", but the
        // consumer-side constant states the intent correctly.
        props.put(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "bi-slave1:9092,bi-slave2:9092,bi-slave3:9092");

        // try-with-resources closes the consumer on exit (the original never
        // closed it, leaving group membership to expire by timeout).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(
                props.getProperties())) {

            consumer.subscribe(Arrays.asList(TOPIC_NAME));

            while (true) {
                try {
                    Thread.sleep(1000);

                    System.out.println();
                    ConsumerRecords<String, String> records = consumer.poll(Long.MAX_VALUE);

                    for (ConsumerRecord<String, String> record : records) {

                        // Fixed log message: was "Receivedmessage: (" (missing space).
                        System.out.println("Received message: (" + record.key()
                                + "," + record.value() + ") at offset "
                                + record.offset());

                    }

                } catch (InterruptedException e) {
                    // Fixed: restore the interrupt status and stop polling
                    // instead of printing the stack trace and looping forever.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }

    }
}
  • 测试
  • 生产者发送消息


    生产者发送消息.png
  • 消费者消费信息


    消费者.png

相关文章

网友评论

      本文标题:CDH为kafka启用kerberos

      本文链接:https://www.haomeiwen.com/subject/vlbwsftx.html