Setting up an ELK environment with Spring Boot


Author: rainbowz | Published 2021-11-26 13:56

    The project uses Logback to record logs, which are shipped to Elasticsearch over TCP via Logstash.
    Software to install: Elasticsearch, Kibana, Logstash


    1. Add the Maven dependency

     <dependency>
         <groupId>net.logstash.logback</groupId>
         <artifactId>logstash-logback-encoder</artifactId>
         <version>5.3</version>
     </dependency>
    
    2. Configuration
      elasticsearch.yml (a quick health-check sketch follows the file)
    # Use a descriptive name for your cluster:
    #
    #cluster.name: my-application
    cluster.name: yutao
    #
    # ------------------------------------ Node ------------------------------------
    #
    # Use a descriptive name for the node:
    #
    #node.name: node-1
    node.name: node-1
    #
    # Add custom attributes to the node:
    #
    #node.attr.rack: r1
    #
    # ----------------------------------- Paths ------------------------------------
    #
    # Path to directory where to store the data (separate multiple locations by comma):
    #
    #path.data: /path/to/data
    #
    # Path to log files:
    #
    #path.logs: /path/to/logs
    #
    # ----------------------------------- Memory -----------------------------------
    #
    # Lock the memory on startup:
    #
    #bootstrap.memory_lock: true
    #
    # Make sure that the heap size is set to about half the memory available
    # on the system and that the owner of the process is allowed to use this
    # limit.
    #
    # Elasticsearch performs poorly when the system is swapping the memory.
    #
    # ---------------------------------- Network -----------------------------------
    #
    # Set the bind address to a specific IP (IPv4 or IPv6):
    #
    #network.host: 192.168.0.1
    network.host: 0.0.0.0
    #
    # Set a custom port for HTTP:
    #
    http.port: 9200
    #
    # For more information, consult the network module documentation.
    #
    # --------------------------------- Discovery ----------------------------------
    #
    # Pass an initial list of hosts to perform discovery when this node is started:
    # The default list of hosts is ["127.0.0.1", "[::1]"]
    #
    #discovery.seed_hosts: ["host1", "host2"]
    discovery.seed_hosts: ["127.0.0.1"]
    #
    # Bootstrap the cluster using an initial set of master-eligible nodes:
    #
    #cluster.initial_master_nodes: ["node-1", "node-2"]
    cluster.initial_master_nodes: ["node-1"]
    cluster.max_shards_per_node: 10000
    #
    # For more information, consult the discovery and cluster formation module documentation.
    #
    # ---------------------------------- Gateway -----------------------------------
    #
    # Block initial recovery after a full cluster restart until N nodes are started:
    #
    #gateway.recover_after_nodes: 3
    #
    # For more information, consult the gateway module documentation.
    #
    # ---------------------------------- Various -----------------------------------
    #
    # Require explicit names when deleting indices:
    #
    #action.destructive_requires_name: true
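
    As a quick sanity check, a small Java program can call the _cluster/health endpoint to confirm Elasticsearch started with this configuration. This is only a sketch: it assumes Elasticsearch is reachable on 127.0.0.1:9200 as configured above, and the class name is just an example.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    // Minimal sketch: check that Elasticsearch answers on http://127.0.0.1:9200
    public class EsHealthCheck {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://127.0.0.1:9200/_cluster/health"))
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            // expect HTTP 200 and a JSON body containing "cluster_name":"yutao" if the config was picked up
            System.out.println(response.statusCode());
            System.out.println(response.body());
        }
    }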
    
    
    3. Create a test.conf file in Logstash's bin directory with the following content (a small smoke-test sketch follows the file):
    # Sample Logstash configuration for creating a simple
    # Beats -> Logstash -> Elasticsearch pipeline.
     
    input {
      # beats is not used here
      #beats {
      #  port => 5044
      #}
      tcp {
        # IP address of this machine
        host => "127.0.0.1"
        # port used for communication; it can be customized, but it must match the
        # destination in the Spring Boot project's logback-spring.xml, otherwise the two cannot talk
        port => 5044
        # JSON format, one event per line
        codec => json_lines
      }
    }
     
    output {
      elasticsearch {
        # Elasticsearch address and port
        hosts => ["http://127.0.0.1:9200"]
        # the index name can be hard-coded or omitted; if omitted, an index named
        # logstash-yyyymmdd-0000x is created automatically
        # YYYY.MM.dd.HH.mm would partition the index by minute:
        #index => "wdnmd-%{+YYYY.MM.dd.HH.mm}"
        index  => "applog"
        #user => "elastic"
        #password => "changeme"
      }
    }
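
    Before wiring up Spring Boot, the pipeline can be smoke-tested by writing one JSON line directly to the tcp input. The sketch below is illustrative only: the class name and field values are arbitrary, and it assumes Logstash is listening on 127.0.0.1:5044 as configured in test.conf.

    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    // Minimal sketch: send one JSON document to the Logstash tcp/json_lines input
    public class LogstashTcpSmokeTest {
        public static void main(String[] args) throws Exception {
            try (Socket socket = new Socket("127.0.0.1", 5044);
                 Writer writer = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)) {
                // the json_lines codec expects one JSON document per line, terminated by "\n"
                writer.write("{\"logLevel\":\"INFO\",\"serviceName\":\"smoke-test\",\"rest\":\"hello logstash\"}\n");
                writer.flush();
            }
        }
    }

    If everything is wired correctly, the document shows up in the applog index a few seconds later.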
    
    4. Create a logback-spring.xml file in the Spring Boot project's resources directory; the port 5044 here must match the port in the Logstash configuration file (a sample controller sketch follows the file)
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE configuration>
    <configuration>
        <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
        <include resource="org/springframework/boot/logging/logback/base.xml"/>
    
        <springProperty scope="context" name="appName" source="spring.application.name"/>
    
        <!-- Location where the log file is written in the project -->
        <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${appName}"/>
    
        <!-- Console log output pattern -->
        <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
    
        <!-- Console output -->
        <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
            <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
                <level>INFO</level>
            </filter>
            <!-- Log output encoder -->
            <encoder>
                <pattern>${CONSOLE_LOG_PATTERN}</pattern>
                <charset>utf8</charset>
            </encoder>
        </appender>
    
        <!-- Logstash appender configuration -->
        <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
            <destination>127.0.0.1:5044</destination>
            <!-- Log output encoder -->
            <encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
                <providers>
                    <timestamp>
                        <timeZone>UTC</timeZone>
                    </timestamp>
                    <pattern>
                        <pattern>
                            {
                            "logLevel": "%level",
                            "serviceName": "${springAppName:-}",
                            "pid": "${PID:-}",
                            "thread": "%thread",
                            "class": "%logger{40}",
                            "rest": "%message"
                            }
                        </pattern>
                    </pattern>
                </providers>
            </encoder>
            <!--<encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"/>-->
        </appender>
    
        <root level="INFO">
            <appender-ref ref="LOGSTASH"/>
            <appender-ref ref="CONSOLE"/>
        </root>
    
    </configuration>
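
    With this configuration in place, any SLF4J logging call in the application is shipped to Logstash by the LOGSTASH appender. A hypothetical controller for producing test log messages might look like the sketch below; the class and endpoint names are made up for illustration.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RestController;

    @RestController
    public class LogDemoController {

        private static final Logger log = LoggerFactory.getLogger(LogDemoController.class);

        // each request writes an INFO event that ends up in the console and in the applog index
        @GetMapping("/log-test")
        public String logTest() {
            log.info("handling /log-test request");
            return "ok";
        }
    }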
    

    5. Test
    Start the Elasticsearch, Logstash, and Kibana services.
    Logstash start command:

    logstash -f test.conf  (run from the bin directory on the command line)

    Log output

    Send an HTTP request so the application prints some log messages; in Kibana you can now see the applog index defined in our Logstash configuration.
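
    The same check can be done without the Kibana UI by searching the index directly. The sketch below assumes the local Elasticsearch instance and the applog index name from test.conf; the class name is just an example.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    // Minimal sketch: list documents that Logstash has written into the applog index
    public class ApplogSearch {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://127.0.0.1:9200/applog/_search?q=*"))
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body());  // hits.total grows as requests are logged
        }
    }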



    Click Index Patterns in Kibana to add our index.


    (Kibana screenshots: Index Patterns → Create index pattern → collected logs)

