References
【1】Quickly set up a local Confluent Kafka test environment: https://www.modb.pro/db/54580
【2】confluent local current: https://docs.confluent.io/confluent-cli/current/command-reference/local/confluent_local_current.html
I. Steps to install Confluent
1. Download Confluent Platform
Download page: Confluent Platform (an email address is required)
2. Install the Confluent CLI
If the confluent CLI is not present under confluent-7.0.1/bin,
install it by following the steps in "Install Confluent CLI".
Set the environment variable:
export CONFLUENT_HOME=/Users/sun/Downloads/confluent-7.0.1/
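A quick check that the CLI is on the PATH and runnable (the install path above is only the example used in this article):
export PATH=$PATH:$CONFLUENT_HOME/bin
confluent version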
3. confluent local commands
./confluent local current
/var/folders/5w/33wydx3s1ys12smyf72762mr0000gq/T/confluent.917162
// the following commands are executed with root privileges
./confluent local services start
./confluent local services status
./confluent local destroy
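The CLI also has per-service subcommands, which are handy when only Kafka Connect needs to be inspected; for example (service name as in the Confluent Platform 7.x CLI reference):
./confluent local services connect status
./confluent local services connect log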
4. The debezium-connector-mysql plugin
share/java/debezium-connector-mysql
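If the plugin is not already present under share/java, it can be installed with the confluent-hub tool shipped in the same bin directory (the version tag here is only an example), then Connect restarted so the plugin path is rescanned:
./confluent-hub install debezium/debezium-connector-mysql:1.5.0
./confluent local services connect stop
./confluent local services connect start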
5. Verify connectors
curl http://localhost:8083/connectors
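To confirm that the Debezium plugin itself is loaded (the call above only lists configured connector instances), the Connect REST API also exposes the installed plugins:
curl http://localhost:8083/connector-plugins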
6. Start MySQL locally (instead of the MySQL Docker image)
The my.cnf inside the Docker image is read-only, so binlog could not be enabled there; MySQL is therefore installed locally instead.
Note: MySQL must have binlog enabled (if it is not, see the my.cnf sketch at the end of this step).
1) Install MySQL with brew and start it
// remove any previous installation and leftover data (destructive)
brew uninstall mysql --force
rm -fr /usr/local/var/mysql/
// install MySQL and start it as a brew service
brew install mysql
brew services start mysql
brew services list
// set the root password to 123456, then log in
mysqladmin -uroot -p password 123456
mysql -uroot -p
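A quick sanity check that the server is reachable with the password set above (password value taken from the mysqladmin command):
mysql -uroot -p123456 -e "SELECT VERSION();"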
[1] install MySQL 8 in MacOS
[2] MySQL configuration files
[3] Setting The Binary Log Format
2) Check whether binlog is enabled
show variables like '%log_bin%';
show variables like '%binlog%';
[1] Enable binlog for MySQL in Docker
[2] Enable MySQL binlog logging in Docker
[3] Enable MySQL binlog logging
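If log_bin shows OFF, a minimal my.cnf sketch to turn on row-based binlog (standard MySQL options; the server-id value is arbitrary, and the file location depends on the Homebrew install, e.g. /usr/local/etc/my.cnf):
[mysqld]
server-id=1
log_bin=mysql-bin
binlog_format=ROW
binlog_row_image=FULL
Then restart MySQL so the settings take effect:
brew services restart mysql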
7. Add the Debezium MySQL connector configuration
[1] Debezium MySQL connector configuration properties
[2] Introduction to Debezium and an example of dynamically calling the Connector API
curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" localhost:8083/connectors -d '
{
  "name": "test_avro_source",
  "config": {
    "connector.class": "io.debezium.connector.mysql.MySqlConnector",
    "name": "test_avro_source",
    "database.hostname": "localhost",
    "database.port": "3306",
    "database.user": "root",
    "database.password": "123456",
    "database.server.id": "122222",
    "database.server.name": "test_avro",
    "database.include.list": "workdb",
    "database.history.kafka.bootstrap.servers": "172.16.6.218:9092",
    "database.history.kafka.topic": "history_test_avro",
    "include.schema.changes": "true",
    "include.schema.comments": "true",
    "decimal.handling.mode": "string",
    "snapshot.mode": "schema_only",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter.schema.registry.url": "http://localhost:8081",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "http://172.16.6.218:8081"
  }
}
'
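The configuration can also be validated before it is created, using the standard Kafka Connect validate endpoint for the plugin class (the class must match connector.class above; only a subset of the properties is shown here):
curl -i -X PUT -H "Content-Type:application/json" localhost:8083/connector-plugins/io.debezium.connector.mysql.MySqlConnector/config/validate -d '{"connector.class":"io.debezium.connector.mysql.MySqlConnector","database.hostname":"localhost","database.port":"3306","database.user":"root","database.password":"123456","database.server.id":"122222","database.server.name":"test_avro","database.history.kafka.bootstrap.servers":"172.16.6.218:9092","database.history.kafka.topic":"history_test_avro"}'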
8. Check connectors
curl http://localhost:8083/connectors
curl http://localhost:8083/connectors/test_avro_source
curl -i -X GET localhost:8083/connectors/test_avro_source/status
curl -i -X POST localhost:8083/connectors/test_avro_source/restart
curl -i -X GET localhost:8083/connectors/test_avro_source/tasks
curl -i -X GET localhost:8083/connectors/test_avro_source/tasks/0/status
curl -i -X POST localhost:8083/connectors/test_avro_source/tasks/0/restart
curl -i -X DELETE localhost:8083/connectors/test_avro_source
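Connect also supports pausing and resuming a connector through the same REST API, which is useful while changing things on the MySQL side:
curl -i -X PUT localhost:8083/connectors/test_avro_source/pause
curl -i -X PUT localhost:8083/connectors/test_avro_source/resume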
9. Create a table and insert data
CREATE TABLE `test_avro`.`table_name_1` (
  `id` int NOT NULL,
  `name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
INSERT INTO test_avro.table_name_1 (id, name) VALUES (1, '22111a');
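The record captured in step 10 below is an update event (op = "u"), i.e. produced by a subsequent change to the row; any UPDATE will do, for example:
UPDATE test_avro.table_name_1 SET name = '22111a' WHERE id = 1;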
10. Verify that the corresponding topic exists and contains data
cd confluent-7.0.1/bin
// check that the expected topic exists
kafka-topics --bootstrap-server localhost:9092 --list
// check that the topic contains data (records shown after Avro deserialization)
kafka-avro-console-consumer --bootstrap-server localhost:9092 --topic test_avro.table_name_1 --from-beginning
====>
{
  "before": {
    "test_avro.table_name_1.Value": {
      "id": 1,
      "name": { "string": "22111" }
    }
  },
  "after": {
    "test_avro.test_avro.table_name_1.Value": {
      "id": 1,
      "name": { "string": "22111a" }
    }
  },
  "source": {
    "version": "1.5.0.Final",
    "connector": "mysql",
    "name": "test_avro",
    "ts_ms": 1646621858000,
    "snapshot": { "string": "false" },
    "db": "test_avro",
    "sequence": null,
    "table": { "string": "table_name_1" },
    "server_id": 1,
    "gtid": null,
    "file": "binlog.000044",
    "pos": 4697,
    "row": 0,
    "thread": null,
    "query": null
  },
  "op": "u",
  "ts_ms": { "long": 1646621858881 },
  "transaction": null
}
// check whether the topic contains data (raw Avro-serialized records, not deserialized)
kafka-console-consumer --bootstrap-server localhost:9092 --topic test_avro
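The Avro consumer can also be pointed at an explicit Schema Registry and asked to print record keys (the URL is the local default used elsewhere in this setup):
kafka-avro-console-consumer --bootstrap-server localhost:9092 --topic test_avro.table_name_1 --from-beginning --property schema.registry.url=http://localhost:8081 --property print.key=true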
Other commands
// delete topic 1111
kafka-topics --bootstrap-server localhost:9092 --topic 1111 --delete
// create topic 1111
kafka-topics --bootstrap-server localhost:9092 --topic 1111 --create
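Two more commands from the same bin directory that are useful while debugging:
// describe a topic (partitions, replicas, configs)
kafka-topics --bootstrap-server localhost:9092 --topic test_avro.table_name_1 --describe
// list consumer groups
kafka-consumer-groups --bootstrap-server localhost:9092 --list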