Hadoop in Practice - 9. Installing and Using Kafka

Author: 笨鸡 | Published 2019-04-01 15:38

    1. Required packages

    • zookeeper-3.4.13.tar.gz
    • kafka_2.12-2.2.0.tgz

    2. Installation

    tar -zxvf kafka_2.12-2.2.0.tgz
    mv kafka_2.12-2.2.0 kafka
    cd kafka/config
    vim server.properties

    broker.id=0      # must be different on each broker
    listeners=PLAINTEXT://master:9092
    log.dirs=/usr/local/kafka/kafka-logs
    zookeeper.connect=master:2181,slave1:2181,slave2:2181,slave3:2181
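
    The install directory is copied to every slave below, so on each slave the broker-specific settings have to be edited afterwards: every broker needs a unique broker.id and should advertise its own hostname. A minimal sketch of the overrides for slave1 (hostnames assumed to match the cluster above):

    # /usr/local/kafka/config/server.properties on slave1
    broker.id=1
    listeners=PLAINTEXT://slave1:9092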
    

    scp -r kafka slave1:/usr/local      # repeat for slave2 and slave3
    vim /etc/profile

    export KAFKA_HOME=/usr/local/kafka
    export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$ZK_HOME/bin:$HBASE_HOME/bin:$STORM_HOME/bin:$KAFKA_HOME/bin
    

    source /etc/profile

    3. Starting Kafka
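
    The brokers register themselves in the ZooKeeper ensemble named in zookeeper.connect, so ZooKeeper must be running on all four nodes first. A quick sketch, assuming the zookeeper-3.4.13 install from step 1 is already configured and zkServer.sh is reachable through ZK_HOME on the PATH:

    zkServer.sh start       # run on master, slave1, slave2 and slave3
    zkServer.sh status      # should report one leader and the rest followers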

    Start Kafka on every node in the cluster:

    kafka-server-start.sh /usr/local/kafka/config/server.properties 1>/dev/null 2>&1 &
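
    A quick way to check that a broker process actually came up on each node is jps, which ships with the JDK and lists Kafka's main class:

    jps | grep Kafka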
    

    Create the topic test:

    kafka-topics.sh --create --zookeeper master:2181,slave1:2181,slave2:2181,slave3:2181 --partitions 3 --replication-factor 3 --topic test
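
    The resulting partition and replica assignment can be checked with the same tool:

    kafka-topics.sh --describe --zookeeper master:2181,slave1:2181,slave2:2181,slave3:2181 --topic test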
    

    Start a console producer:

    kafka-console-producer.sh --broker-list master:9092 --topic test
    

    Start a console consumer:

    kafka-console-consumer.sh --bootstrap-server master:9092 --topic test --from-beginning
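
    With both terminals open, every line typed into the producer should show up in the consumer; --from-beginning also replays messages that were sent before the consumer started.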
    

    The cluster can also be inspected with the Kafka Tool GUI.
    Kafka Tool download: http://www.kafkatool.com/download.html

    (screenshot: kafka.png)

    4. Kafka Java code

    ProducerDemo.java

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    
    import java.util.Properties;
    
    public class ProducerDemo {
    
        public static void main(String[] args) throws Exception{
    
            Properties props = new Properties();
            props.put("bootstrap.servers", "master:9092, slave1:9092, slave2:9092, slave3:9092");
            props.put("acks", "all");               // 记录完整提交,最慢的但是最大可能的持久化
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            Producer<String, String> producer = new KafkaProducer<String, String>(props);
            for (int i = 0; i < 100; i++) {
                Thread.sleep(500);
                producer.send(new ProducerRecord<String, String>("my-replicated-topic",
                        Integer.toString(i), "I love you kafka " + i + " times!"));
            }
            producer.close();
        }
    
    }
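
    send() above is asynchronous and only returns a Future, so a record that fails to be written would go unnoticed. A variant sketch that attaches a delivery callback instead (the class name and message text are made up for illustration; broker list, topic and serializers mirror ProducerDemo):

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    import java.util.Properties;

    public class ProducerWithCallbackDemo {

        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "master:9092,slave1:9092,slave2:9092,slave3:9092");
            props.put("acks", "all");
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            try (Producer<String, String> producer = new KafkaProducer<>(props)) {
                for (int i = 0; i < 10; i++) {
                    producer.send(new ProducerRecord<>("my-replicated-topic", Integer.toString(i), "message " + i),
                            (metadata, exception) -> {
                                if (exception != null) {
                                    exception.printStackTrace();          // delivery failed after the client gave up retrying
                                } else {
                                    System.out.printf("stored at partition %d, offset %d%n",
                                            metadata.partition(), metadata.offset());
                                }
                            });
                }
            }   // try-with-resources closes the producer, which flushes any outstanding records
        }
    }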
    
    

    ConsumerDemo.java

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    
    import java.time.Duration;
    import java.util.Arrays;
    import java.util.Properties;
    
    public class ConsumerDemo {
    
        public static void main(String[] args) throws Exception{
            Properties props = new Properties();
            props.put("bootstrap.servers", "master:9092, slave1:9092, slave2:9092, slave3:9092");
            props.setProperty("group.id", "my-replicated-topic");
            props.setProperty("enable.auto.commit", "true");
            props.setProperty("auto.commit.interval.ms", "1000");
            props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
            consumer.subscribe(Arrays.asList("my-replicated-topic"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records)
                    System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
            }
        }
    }
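
    Because enable.auto.commit is on, offsets are committed on a timer whether or not the polled records were really processed. A sketch of committing manually instead (the class name and group id are made up; the rest mirrors ConsumerDemo):

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    import java.time.Duration;
    import java.util.Arrays;
    import java.util.Properties;

    public class ManualCommitConsumerDemo {

        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "master:9092,slave1:9092,slave2:9092,slave3:9092");
            props.setProperty("group.id", "manual-commit-demo");           // hypothetical group id
            props.setProperty("enable.auto.commit", "false");              // commit offsets explicitly instead
            props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
            consumer.subscribe(Arrays.asList("my-replicated-topic"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
                if (!records.isEmpty()) {
                    consumer.commitSync();   // offsets are committed only after the whole batch was handled
                }
            }
        }
    }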
    

    pom.xml

    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
    
        <groupId>com.ctgu</groupId>
        <artifactId>kafka_test</artifactId>
        <version>1.0-SNAPSHOT</version>
    
        <dependencies>
            <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
            <dependency>
                <groupId>org.apache.kafka</groupId>
                <artifactId>kafka-clients</artifactId>
                <version>2.2.0</version>
            </dependency>
    
        </dependencies>
    
    </project>
    
