Reference video: Springboot + ElasticSearch 构建博客检索系统 (https://www.imooc.com/learn/1161)
Pitfalls encountered
- Running Logstash under docker-compose: the service kept failing to start and had to be troubleshot (see the sketch after the error message below);
- The Logstash config file conf/mysql.conf has very strict format requirements. I spent a long time tracking down the problem; it kept reporting the error below. The fix was to write everything inside the braces flush left, with no leading whitespace.
- Also pay attention to the encoding of the mysql.conf file: it must be UTF-8.
LogStash::ConfigurationError", :message=>"Expected one of #, if, \", ', } at line
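To troubleshoot this kind of startup failure, following the container logs and letting Logstash validate the pipeline config usually narrows it down quickly. A minimal sketch, assuming the container name and file paths from the docker-compose file further below:
# follow the Logstash container logs while it starts
docker-compose -f docker-compose-imooc-es-kibana-logstash.yml logs -f logstash_01
# only parse the pipeline config and exit; a separate data path avoids clashing with the running instance
docker exec -it logstash_01 /usr/share/logstash/bin/logstash -f /usr/share/logstash/config/mysql.conf --config.test_and_exit --path.data /tmp/logstash-config-test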
Important notes
- To install the IK analyzer, just unzip the downloaded release zip into the ES plugins directory and restart the service (a sketch for the containerized setup follows this list);
- IK ships two analyzers: ik_max_word and ik_smart;
- Analysis example:
POST _analyze
{
  "analyzer": "ik_max_word",
  "text": "我是中国人"
}
- To add custom Chinese vocabulary, just append the words to the dictionary file main.dic under the plugins directory;
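A minimal install sketch for the es_01 container defined below; the zip file name is an assumption and must match the exact ES version (6.4.0), taken from the medcl/elasticsearch-analysis-ik GitHub releases page:
# unzip on the host, then copy into the container's plugins directory and restart
unzip elasticsearch-analysis-ik-6.4.0.zip -d ik
# plugin-descriptor.properties must end up directly under plugins/ik
docker cp ik es_01:/usr/share/elasticsearch/plugins/ik
docker restart es_01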
Commands used in Kibana Dev Tools
GET /blog/_search
{
  "query": {
    "bool": {
      # should means OR, must means AND
      "should": [
        {
          "match": {
            "id": 8
          }
        },
        {
          "match": {
            "id": 7
          }
        }
      ]
    }
  }
}
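For comparison, a hedged sketch of the must (AND) form of the same kind of query; the title field and its value are assumptions based on the t_blog table that Logstash syncs in the config below:
GET /blog/_search
{
  "query": {
    "bool": {
      # must means AND: both conditions have to match
      "must": [
        { "match": { "title": "elasticsearch" } },
        { "match": { "id": 8 } }
      ]
    }
  }
}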
Configuration files
elasticsearch.yml
# this setting did not take effect here; the one defined in the docker-compose file is what is actually used
cluster.name: "docker-cluster"
network.host: 0.0.0.0
# minimum_master_nodes need to be explicitly set when bound on a public IP
# set to 1 to allow single node clusters
# Details: https://github.com/elastic/elasticsearch/pull/17288
discovery.zen.minimum_master_nodes: 1
kibana.yml
---
# Default Kibana configuration from kibana-docker.
server.name: kibana
server.host: "0"
elasticsearch.url: http://elasticsearch:9200
xpack.monitoring.ui.container.elasticsearch.enabled: true
logstash.yml
http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.url: http://elasticsearch:9200
conf/mysql.conf
# reference https://blog.csdn.net/qq_38270106/article/details/88699334
input {
jdbc {
jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-5.1.31.jar"
#jdbc_driver_library => "/Users/justin/Desktop/files/docker-app/es-kibana-logstash/mysql-connector-java-5.1.31.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://192.168.124.3:3306/devops_db"
jdbc_user => "your_admin"
jdbc_password => "your_password"
schedule => "* * * * *"
clean_run => true
statement => "SELECT * FROM t_blog WHERE update_time > :sql_last_value AND update_time < NOW() ORDER BY update_time desc"
#use_column_value => true
#tracking_column_type => "timestamp"
#tracking_column => "update_time"
#last_run_metadata_path => "syncpoint_table"
}
}
output {
elasticsearch {
#hosts => ["localhost:9200"]
hosts => ["elasticsearch:9200"]
index => "blog"
document_id => "%{id}"
}
}
docker-compose-imooc-es-kibana-logstash.yml
# https://www.imooc.com/learn/1161 - Springboot + ElasticSearch 构建博客检索系统
version: '3'
# docker-compose -f docker-compose-imooc-es-kibana-logstash.yml up -d es_01
# docker-compose -f docker-compose-imooc-es-kibana-logstash.yml up -d kibana_01
# docker-compose -f docker-compose-imooc-es-kibana-logstash.yml up -d logstash_01
networks:
  default:
    external:
      name: docker-app_default
services:
  es_01:
    image: elasticsearch:6.4.0
    restart: always
    hostname: es_01
    ports:
      - "9200:9200"
      - "9300:9300"
    container_name: es_01
    environment:
      - TZ=Asia/Shanghai
      - node.name=es_01
      - cluster.name=my-application
      - discovery.type=single-node
      # - discovery.seed_hosts=es02,es03
      # - cluster.initial_master_nodes=es01
      # - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    volumes:
      - ./es_01-data:/usr/share/elasticsearch/data
      - ./elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
      # - es_01_data/package:/tmp/package
  kibana_01:
    image: kibana:6.4.0
    ports:
      - "5601:5601"
    environment:
      TZ: Asia/Shanghai
      SERVER_NAME: kibana_01
      ELASTICSEARCH_HOSTS: http://es_01:9200
    external_links:
      - es_01:elasticsearch
    volumes:
      - ./kibana.yml:/usr/share/kibana/config/kibana.yml
      - ./kibana_01-data:/usr/share/kibana/data
    depends_on:
      - es_01
    container_name: kibana_01
  logstash_01:
    image: logstash:6.4.0
    ports:
      - "5044:5044" # not sure whether this port is actually used or needs to be exposed
      - "9600:9600" # not sure whether this port is actually used or needs to be exposed
    environment:
      TZ: Asia/Shanghai
      # SERVER_NAME: logstash_01
      # ELASTICSEARCH_HOSTS: http://es_01:9200
    command: ./bin/logstash -f ./config/mysql.conf
    volumes:
      # - ./logstash_conf:/config-dir
      # - ./logstash_plugins:/tmp/plugins
      - ./logstash.yml:/usr/share/logstash/config/logstash.yml
      - ./mysql.conf:/usr/share/logstash/config/mysql.conf
      - ./mysql-connector-java-5.1.31.jar:/usr/share/logstash/mysql-connector-java-5.1.31.jar
    external_links:
      - es_01:elasticsearch
    depends_on:
      - es_01
    container_name: logstash_01
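Once the three containers are up, a quick smoke test from the host using the ports published above (the blog index only exists after Logstash has completed at least one scheduled run):
curl http://localhost:9200                   # ES node info
curl "http://localhost:9200/_cat/indices?v"  # the blog index should show up after the first sync
curl http://localhost:9200/blog/_count       # number of documents synced from t_blog
# Kibana UI: http://localhost:5601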