|
|
|
@@ -2,6 +2,7 @@ package Kafka2ESService

import (
	"dsDataex/MyTask/Kafka2ES/Kafka2ESDAO"
	"dsDataex/Utils/ES7Util"
	"dsDataex/Utils/KafkaUtil"
	"fmt"
	"github.com/go-co-op/gocron"
@@ -11,18 +12,30 @@ import (
)

var ChanTopic chan []string
var LstTopic []string

var GROUP_NO = 0

var loc sync.Mutex
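// Summary of the package-level state (added note, inferred from the code below):
//   ChanTopic - buffered channel on which DBWatchProcess publishes the current topic list.
//   LstTopic  - older topic cache; the per-topic bookkeeping now lives in the
//               KafkaUtil.*TopicProc maps instead.
//   GROUP_NO  - selects the consumer-group prefix used by KafkaProcess ("group_" or "group2_").
//   loc       - mutex guarding writes to the shared KafkaUtil maps.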
|
|
|
|
|
|
|
|
|
|
/**
 * @Author zhangjun
 * @Description start the Kafka 2 ES data processing service
 * @Date 2020-08-04 09:59
 * @Param
 * @return
 **/
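// Overall flow (added note, inferred from this file): DBWatchProcess polls MySQL through
// Kafka2ESDAO.GetTopics and pushes the topic list into ChanTopic; ServiceStart consumes that
// channel, starts one Kafka consumer per new topic via KafkaProcess, and drops the bookkeeping
// of topics that have disappeared; LogProcess prints per-topic message counters and
// ESRefreshProcess refreshes the matching ES indices.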
|
|
|
|
|
func ServiceStart() {

	cronMan := gocron.NewScheduler(time.UTC)

	//cronMan.Every(5).Seconds().StartImmediately().Do(DBWatch)
	cronMan.Every(5).Seconds().StartImmediately().Do(DBWatchProcess)

	cronMan.Every(10).Seconds().Do(LogProcess)

	cronMan.Every(60).Seconds().Do(ESRefreshProcess)

	cronMan.StartAsync()

	defer func() {
@@ -35,8 +48,8 @@ func ServiceStart() {

	//var procNo = int(ConfigUtil.KafkaProcNo)

	KafkaUtil.ChanTopicProc = make(map[string]chan bool)
	KafkaUtil.StateTopicProc = make(map[string]bool)
	KafkaUtil.CountTopicProc = make(map[string]int)

	ChanTopic = make(chan []string, 100)
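	// The three KafkaUtil maps are keyed by topic (added note, based on their use in this file):
	//   ChanTopicProc  - per-consumer control channel; its use for stopping consumers is
	//                    currently commented out further below.
	//   StateTopicProc - "consumer alive" flag, presumably cleared by KafkaUtil.Consume on failure.
	//   CountTopicProc - number of messages processed per topic, printed by LogProcess.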
|
|
|
|
@@ -45,110 +58,137 @@ func ServiceStart() {

		// note: topics is presumably received from ChanTopic in the enclosing loop,
		// which lies outside this hunk
		for no := 0; no < len(topics); no++ {

			topic := topics[no]
			_, f := KafkaUtil.CountTopicProc[topic]

			if f == false {

				//change by zhangjun 2020-08-02
				//go KafkaProcess(topic, procNo)
				//cronMan.Every(60).Seconds().SetTag([]string{"kafka_" + topic}).StartImmediately().Do(KafkaProcess, topic)
				KafkaProcess(topic)

				//time.Sleep(time.Second * 1)
			}
		}
		if len(KafkaUtil.CountTopicProc) > len(topics) {

			for k := range KafkaUtil.CountTopicProc {

				if Contains(topics, k) == -1 {

					// remove the scheduled job of a topic that no longer exists in the database
					//cronMan.RemoveJobByTag("kafka_" + LstTopic[no])

					// stop the consumer goroutine
					//KafkaUtil.ChanTopicProc[LstTopic[no]] <- true

					loc.Lock()

					//delete(KafkaUtil.ChanTopicProc, LstTopic[no])
					//delete(KafkaUtil.StateTopicProc, k)
					delete(KafkaUtil.CountTopicProc, k)

					loc.Unlock()
				}
			}

			//LstTopic = []string{}
			//LstTopic = append(LstTopic, topics...)
		}
	}
}
/**
 * @Author zhangjun
 * @Description watch the MySQL metadata / datasource tables for topic changes
 * @Date 2020-08-04 09:59
 * @Param
 * @return
 **/
func DBWatchProcess() {

	_, topics := Kafka2ESDAO.GetTopics()

	ChanTopic <- topics
}
/**
 * @Author zhangjun
 * @Description consume Kafka data for one topic
 * @Date 2020-08-04 09:59
 * @Param
 * @return
 **/
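// Note (added): GROUP_NO switches the consumer-group name between "group_<topic>" and
// "group2_<topic>". Per the author's 2020-08-04 change note, a second consumer group can be
// run in parallel so the data written to ES can be cross-checked.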
|
|
|
|
|
func KafkaProcess(topic string) {

	_, f := KafkaUtil.CountTopicProc[topic]

	if f == false {

		//add by zhangjun 2020-07-30
		loc.Lock()

		KafkaUtil.ChanTopicProc[topic] = nil
		KafkaUtil.StateTopicProc[topic] = true
		KafkaUtil.CountTopicProc[topic] = 0

		loc.Unlock()

		if GROUP_NO == 0 {
			fmt.Printf("Dataex Kafka2ES Process Start,Topic:%s,ConsumerGroup:%s.\n", topic, "group_"+topic)

			// start the consumer goroutine
			go KafkaUtil.Consume(topic, "group_"+topic)
			//time.Sleep(time.Second * 10)
		} else {
			//add by zhangjun 2020-08-04
			// run a second consumer group in parallel to cross-check the data!!!
			//time.Sleep(time.Second * 5)
			fmt.Printf("Dataex Kafka2ES Process Start,Topic:%s,ConsumerGroup:%s.\n", topic, "group2_"+topic)

			go KafkaUtil.Consume(topic, "group2_"+topic)
		}
	} else { //TODO: handle consumer goroutines that have stopped abnormally!!!

		if KafkaUtil.StateTopicProc[topic] == false {
			fmt.Printf("Dataex Kafka2ES Process Start,Topic:%s,ConsumerGroup:%s.\n", topic, "group_"+topic)

			KafkaUtil.StateTopicProc[topic] = true
			KafkaUtil.CountTopicProc[topic] = 0

			go KafkaUtil.Consume(topic, "group_"+topic)

			time.Sleep(time.Second * 10)
		}
	}
}
/**
 * @Author zhangjun
 * @Description print the Kafka data processing progress per topic
 * @Date 2020-08-04 09:59
 * @Param
 * @return
 **/
func LogProcess() {

	for k, v := range KafkaUtil.CountTopicProc {

		fmt.Println("[Kafka] ["+k+"] "+time.Now().Format("2006/01/02 15:04:05")+" Process message total:", v)
	}
}
/**
 * @Author zhangjun
 * @Description periodically refresh the ES indices
 * @Date 2020-08-04 09:59
 * @Param
 * @return
 **/
func ESRefreshProcess() {

	for k := range KafkaUtil.CountTopicProc {

		ES7Util.IndexRefresh(k)
	}
}
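// Note (added): the topic name is handed to ES7Util.IndexRefresh unchanged, so each Kafka
// topic is assumed to map 1:1 to an ES index of the same name.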
|
|
|
|
|
|
|
|
|
|