Former-commit-id: 81dea9a1cd5cfb99d40336e9b81df671966d8c4b
Former-commit-id: 6fd449d1c0f14a9657d5c87dd5f97ce36c19253d
1.0
wanggang 4 years ago
parent fc7fe236f0
commit f24b712ab7

@ -0,0 +1,16 @@
#!/bin/bash
echo "Launching Kafka Connect worker"
/etc/confluent/docker/run &
#
echo "Waiting for Kafka Connect to start listening on localhost:8083 ⏳"
while : ; do
curl_status=$(curl -s -o /dev/null -w %{http_code} http://localhost:8083/connectors)
echo -e $(date) " Kafka Connect listener HTTP state: " $curl_status " (waiting for 200)"
if [ $curl_status -eq 200 ] ; then
break
fi
sleep 5
done
cd /usr/share/confluent-hub-components/
./start.sh
sleep infinity

@ -1,17 +1,3 @@
#!/bin/bash
echo "Launching Kafka Connect worker"
/etc/confluent/docker/run &
#
echo "Waiting for Kafka Connect to start listening on localhost:8083 ⏳"
while : ; do
curl_status=$(curl -s -o /dev/null -w %{http_code} http://localhost:8083/connectors)
echo -e $(date) " Kafka Connect listener HTTP state: " $curl_status " (waiting for 200)"
if [ $curl_status -eq 200 ] ; then
break
fi
sleep 5
done
cd /usr/share/confluent-hub-components/
curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" http://localhost:8083/connectors/ -d @mysql2kafka.json
curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" http://localhost:8083/connectors/ -d @kafka2elasticsearch.json
sleep infinity
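The JSON payloads posted above, mysql2kafka.json and kafka2elasticsearch.json, are not included in this diff. Below is a minimal sketch of what they might look like, assuming a Debezium MySQL source connector and the Confluent Elasticsearch sink; hostnames, credentials, and connector names are placeholders, and exact field names vary by plugin version.

```bash
# Hypothetical sketch only - hosts, credentials and connector names are assumptions,
# not values taken from this repository.
cat > mysql2kafka.json <<'EOF'
{
  "name": "mysql2kafka",
  "config": {
    "connector.class": "io.debezium.connector.mysql.MySqlConnector",
    "tasks.max": "1",
    "database.hostname": "mysql",
    "database.port": "3306",
    "database.user": "root",
    "database.password": "changeme",
    "database.server.id": "1",
    "database.server.name": "mysql",
    "database.include.list": "example",
    "table.include.list": "example.User",
    "database.history.kafka.bootstrap.servers": "kafka:9092",
    "database.history.kafka.topic": "schema-changes.example"
  }
}
EOF

cat > kafka2elasticsearch.json <<'EOF'
{
  "name": "kafka2elasticsearch",
  "config": {
    "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
    "tasks.max": "1",
    "topics": "mysql.example.User",
    "connection.url": "http://elasticsearch:9200",
    "key.ignore": "true",
    "schema.ignore": "true"
  }
}
EOF
```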

@ -70,7 +70,7 @@ services:
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
volumes:
- ./conf/kafka-connect:/usr/share/confluent-hub-components
command: bash -c "/usr/share/confluent-hub-components/start.sh"
command: bash -c "/usr/share/confluent-hub-components/boot.sh"
depends_on:
- mysql
- kafka
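Once the command points at boot.sh, an easy way to confirm the worker actually waited for the REST listener before registering connectors is to follow the service logs; the service name below is an assumption, so use whatever name docker-compose.yml gives the Connect container.

```bash
# "kafka-connect" is a placeholder service name - replace it with the real one from docker-compose.yml
docker-compose logs -f kafka-connect
```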

@ -7,6 +7,14 @@
1. http://doc.dorisdb.com/2146003
Doris does not support Superset or Metabase; this will be addressed later through custom development.
## doris hardware requirements
http://doc.dorisdb.com/2228586
BE: 16 cores and 64 GB of RAM or more is recommended; FE: 8 cores and 16 GB of RAM or more.
Disks can be either HDD or SSD.
The CPU must support the AVX2 instruction set (run cat /proc/cpuinfo | grep avx2 and confirm there is output; if it is not supported, switching machines is recommended). DorisDB's vectorization needs this CPU instruction set to perform well.
The network needs 10 Gigabit NICs and 10 Gigabit switches.
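The AVX2 check above can be scripted when vetting machines; this is a minimal sketch of the same cat /proc/cpuinfo test.

```bash
# Warn and exit non-zero when the CPU lacks AVX2, which DorisDB vectorization depends on
if grep -q avx2 /proc/cpuinfo; then
  echo "AVX2 supported"
else
  echo "AVX2 not supported - consider a different machine" >&2
  exit 1
fi
```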
## kafka connect
1. https://www.confluent.io/hub/confluentinc/kafka-connect-elasticsearch
@ -20,33 +28,18 @@ Doris does not support Superset or Metabase; this will be addressed later through custom development.
MySQL is configured with master-slave replication; the database example and its User table have already been created.
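The actual table definition is not part of this commit; the hypothetical sketch below assumes only the Id and UserName columns referenced by the Doris DDL further down, with placeholder credentials.

```bash
# Placeholder credentials and schema - the real ones live in the mysql service's init scripts
docker-compose exec mysql mysql -uroot -p'changeme' -e "
CREATE DATABASE IF NOT EXISTS example;
CREATE TABLE IF NOT EXISTS example.User (
  Id INT PRIMARY KEY,
  UserName VARCHAR(64)
);"
```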
## debezium
Enter the container, go to the /start directory, and run ./start.sh
## debezium-ui
Check the Debezium status at http://localhost:8080
## kafka
Use kafkacat to view messages in real time
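For example, to list topics and tail the change topic produced by Debezium (broker and topic names are the ones used elsewhere in this setup; run it from somewhere that can resolve the kafka host, e.g. a container on the compose network):

```bash
# List cluster metadata, then consume the change topic from the beginning
kafkacat -b kafka:9092 -L
kafkacat -b kafka:9092 -t mysql.example.User -C -o beginning
```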
## kafka-connect
Enter the container, go to the /usr/share/java/confluentinc-kafka-connect-elasticsearch directory, and run ./start.sh
View all plugins: http://localhost:8083/connector-plugins
View running connectors: http://localhost:8083/connectors
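The Connect REST API also exposes a per-connector status endpoint; the connector name below is an assumption based on the JSON file name, since the real name comes from the "name" field inside mysql2kafka.json.

```bash
# List registered connectors, then inspect one connector's state and its tasks
curl -s http://localhost:8083/connectors
curl -s http://localhost:8083/connectors/mysql2kafka/status
```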
## kibana
Check the Elasticsearch status at http://localhost:9601
Check the Elasticsearch status at http://localhost:5601
## doris hardware requirements
http://doc.dorisdb.com/2228586
BE: 16 cores and 64 GB of RAM or more is recommended; FE: 8 cores and 16 GB of RAM or more.
Disks can be either HDD or SSD.
The CPU must support the AVX2 instruction set (run cat /proc/cpuinfo | grep avx2 and confirm there is output; if it is not supported, switching machines is recommended). DorisDB's vectorization needs this CPU instruction set to perform well.
The network needs 10 Gigabit NICs and 10 Gigabit switches.
### Enter the mysql container and connect to the doris fe
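Doris FE speaks the MySQL protocol on port 9030, so the mysql client inside the mysql container can connect to it directly; the FE hostname and user below are assumptions, so adjust them to the compose service name and account actually in use.

```bash
# 9030 is the Doris FE query (MySQL-protocol) port; "doris-fe" and root are placeholders
docker-compose exec mysql mysql -h doris-fe -P 9030 -uroot
```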
@ -108,8 +101,12 @@ AGGREGATE KEY(Id,UserName)
DISTRIBUTED BY HASH(Id) BUCKETS 10
PROPERTIES("replication_num" = "1");
=======================================================================
## Importing from kafka into doris (not finished)
Import from kafka
View import jobs:
SHOW ALL ROUTINE LOAD;
Create an import job:
CREATE ROUTINE LOAD example.job1 on User
PROPERTIES
@ -125,3 +122,4 @@ FROM KAFKA
"kafka_broker_list"= "kafka:9092",
"kafka_topic" = "mysql.example.User"
);
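Once the routine load job exists it can be inspected, paused, and resumed from the same Doris FE session; example.job1 is the job created above, and the connection details are the same assumptions as in the earlier FE connection sketch.

```bash
# Run against the Doris FE over the MySQL protocol (hostname is a placeholder)
mysql -h doris-fe -P 9030 -uroot -e "SHOW ROUTINE LOAD FOR example.job1\G"
mysql -h doris-fe -P 9030 -uroot -e "PAUSE ROUTINE LOAD FOR example.job1;"
mysql -h doris-fe -P 9030 -uroot -e "RESUME ROUTINE LOAD FOR example.job1;"
```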
