Connect相关代码

This commit is contained in:
zengqiao
2022-12-06 18:13:44 +08:00
committed by EricZeng
parent fb5964af84
commit e8652d5db5
169 changed files with 6472 additions and 317 deletions

View File

@@ -0,0 +1,44 @@
package com.xiaojukeji.know.streaming.km.persistence.connect;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ConnectClusterLoadChangedEvent;
import org.springframework.context.ApplicationListener;
import java.util.concurrent.locks.ReentrantLock;
/**
* @author wyb
* @date 2022/11/7
*/
/**
 * Base listener for connect-cluster load changes: routes each
 * {@link ConnectClusterLoadChangedEvent} to the matching hook (add/modify/remove)
 * implemented by a concrete client-managing subclass.
 */
public abstract class AbstractConnectClusterChangeHandler implements ApplicationListener<ConnectClusterLoadChangedEvent> {
    private static final ILog log = LogFactory.getLog(AbstractConnectClusterChangeHandler.class);

    /** Shared lock for subclasses that mutate their client maps. */
    protected final ReentrantLock modifyClientMapLock = new ReentrantLock();

    /** Called when a connect cluster appears for the first time. */
    protected abstract void add(ConnectCluster connectCluster);

    /** Called when a loaded connect cluster's stored definition changed. */
    protected abstract void modify(ConnectCluster newConnectCluster, ConnectCluster oldConnectCluster);

    /** Called when a connect cluster disappears from storage. */
    protected abstract void remove(ConnectCluster connectCluster);

    @Override
    public void onApplicationEvent(ConnectClusterLoadChangedEvent event) {
        final ConnectCluster inDB = event.getInDBConnectCluster();
        final ConnectCluster inCache = event.getInCacheConnectCluster();

        switch (event.getOperationEnum()) {
            case ADD:
                this.add(inDB);
                break;
            case EDIT:
                this.modify(inDB, inCache);
                break;
            case DELETE:
                this.remove(inCache);
                break;
            default:
                log.error("method=onApplicationEvent||event={}||msg=illegal event", event);
        }
    }
}

View File

@@ -0,0 +1,146 @@
package com.xiaojukeji.know.streaming.km.persistence.connect;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectWorkerPO;
import com.xiaojukeji.know.streaming.km.common.jmx.JmxConnectorWrap;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache;
import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectWorkerDAO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author wyb
* @date 2022/10/31
*/
@Component
public class ConnectJMXClient extends AbstractConnectClusterChangeHandler {
    private static final ILog log = LogFactory.getLog(ConnectJMXClient.class);

    /** connectClusterId -> (workerId -> JMX connector). */
    private static final Map<Long, Map<String, JmxConnectorWrap>> JMX_MAP = new ConcurrentHashMap<>();

    @Autowired
    private ConnectWorkerDAO connectWorkerDAO;

    /**
     * Returns a JMX connector that is verified (re)connected, or null when the
     * connector cannot be obtained or fails its connection check.
     */
    public JmxConnectorWrap getClientWithCheck(Long connectClusterId, String workerId) {
        JmxConnectorWrap jmxConnectorWrap = this.getClient(connectClusterId, workerId);
        if (ValidateUtils.isNull(jmxConnectorWrap) || !jmxConnectorWrap.checkJmxConnectionAndInitIfNeed()) {
            log.error("method=getClientWithCheck||connectClusterId={}||workerId={}||msg=get jmx connector failed!", connectClusterId, workerId);
            return null;
        }
        return jmxConnectorWrap;
    }

    /**
     * Returns the cached connector for the worker, creating it lazily on first use.
     * May return null when the cluster/worker is unknown or creation fails.
     */
    public JmxConnectorWrap getClient(Long connectorClusterId, String workerId) {
        // Plain get instead of getOrDefault: avoids allocating a throwaway map on every lookup
        Map<String, JmxConnectorWrap> jmxMap = JMX_MAP.get(connectorClusterId);
        JmxConnectorWrap jmxConnectorWrap = (jmxMap == null) ? null : jmxMap.get(workerId);
        if (jmxConnectorWrap != null) {
            // already created, return directly
            return jmxConnectorWrap;
        }

        // not created yet, create it now
        return this.createJmxConnectorWrap(connectorClusterId, workerId);
    }

    private JmxConnectorWrap createJmxConnectorWrap(Long connectorClusterId, String workerId) {
        ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(connectorClusterId);
        if (connectCluster == null) {
            return null;
        }
        return this.createJmxConnectorWrap(connectCluster, workerId);
    }

    private JmxConnectorWrap createJmxConnectorWrap(ConnectCluster connectCluster, String workerId) {
        ConnectWorker connectWorker = this.getConnectWorkerFromDB(connectCluster.getId(), workerId);
        if (connectWorker == null) {
            return null;
        }

        // Acquire BEFORE try so the finally-unlock never runs on an un-acquired lock
        modifyClientMapLock.lock();
        try {
            // Double-check under the lock: another thread may have created it already
            Map<String, JmxConnectorWrap> workerMap = JMX_MAP.computeIfAbsent(connectCluster.getId(), ignored -> new ConcurrentHashMap<>());
            JmxConnectorWrap jmxConnectorWrap = workerMap.get(workerId);
            if (jmxConnectorWrap != null) {
                return jmxConnectorWrap;
            }

            log.debug("method=createJmxConnectorWrap||connectClusterId={}||workerId={}||msg=create JmxConnectorWrap starting", connectCluster.getId(), workerId);

            JmxConfig jmxConfig = ConvertUtil.str2ObjByJson(connectCluster.getJmxProperties(), JmxConfig.class);
            if (jmxConfig == null) {
                jmxConfig = new JmxConfig();
            }

            jmxConnectorWrap = new JmxConnectorWrap(
                    "connectClusterId: " + connectCluster.getId() + " workerId: " + workerId,
                    null,
                    connectWorker.getHost(),
                    // worker-specific port wins over the cluster-level JMX config
                    connectWorker.getJmxPort() != null ? connectWorker.getJmxPort() : jmxConfig.getJmxPort(),
                    jmxConfig
            );

            workerMap.put(workerId, jmxConnectorWrap);
            return jmxConnectorWrap;
        } catch (Exception e) {
            // creation failure is actionable -> error, not debug
            log.error("method=createJmxConnectorWrap||connectClusterId={}||workerId={}||msg=create JmxConnectorWrap failed||errMsg=exception||", connectCluster.getId(), workerId, e);
        } finally {
            modifyClientMapLock.unlock();
        }
        return null;
    }

    /** Loads the worker row for (cluster, workerId); null when absent. */
    private ConnectWorker getConnectWorkerFromDB(Long connectorClusterId, String workerId) {
        LambdaQueryWrapper<ConnectWorkerPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
        lambdaQueryWrapper.eq(ConnectWorkerPO::getConnectClusterId, connectorClusterId);
        lambdaQueryWrapper.eq(ConnectWorkerPO::getWorkerId, workerId);
        ConnectWorkerPO connectWorkerPO = connectWorkerDAO.selectOne(lambdaQueryWrapper);
        if (connectWorkerPO == null) {
            return null;
        }
        return ConvertUtil.obj2Obj(connectWorkerPO, ConnectWorker.class);
    }

    @Override
    protected void add(ConnectCluster connectCluster) {
        JMX_MAP.putIfAbsent(connectCluster.getId(), new ConcurrentHashMap<>());
    }

    @Override
    protected void modify(ConnectCluster newConnectCluster, ConnectCluster oldConnectCluster) {
        // Objects.equals: null-safe — jmxProperties may legitimately be null
        if (Objects.equals(newConnectCluster.getJmxProperties(), oldConnectCluster.getJmxProperties())) {
            return;
        }
        // JMX config changed: drop existing connectors so they are rebuilt lazily
        this.remove(newConnectCluster);
        this.add(newConnectCluster);
    }

    @Override
    protected void remove(ConnectCluster connectCluster) {
        Map<String, JmxConnectorWrap> jmxMap = JMX_MAP.remove(connectCluster.getId());
        if (jmxMap == null) {
            return;
        }
        for (JmxConnectorWrap jmxConnectorWrap : jmxMap.values()) {
            jmxConnectorWrap.close();
        }
    }
}

View File

@@ -0,0 +1,37 @@
package com.xiaojukeji.know.streaming.km.persistence.connect.cache;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author wyb
* @date 2022/11/7
*/
/**
 * Process-wide cache of the connect clusters currently loaded from storage.
 * Backed by a ConcurrentHashMap; all accessors are static and thread-safe.
 */
public class LoadedConnectClusterCache {
    /** connectClusterId -> loaded cluster. */
    private static final Map<Long, ConnectCluster> CONNECT_CLUSTER_MAP = new ConcurrentHashMap<>();

    /** Static utility — not instantiable. */
    private LoadedConnectClusterCache() {
    }

    public static boolean containsByPhyId(Long connectClusterId) {
        return CONNECT_CLUSTER_MAP.containsKey(connectClusterId);
    }

    /** Returns the cached cluster, or null when not loaded. */
    public static ConnectCluster getByPhyId(Long connectClusterId) {
        return CONNECT_CLUSTER_MAP.get(connectClusterId);
    }

    /** Removes and returns the cached cluster (null when absent). */
    public static ConnectCluster remove(Long connectClusterId) {
        return CONNECT_CLUSTER_MAP.remove(connectClusterId);
    }

    /** Inserts or overwrites the entry keyed by the cluster's id. */
    public static void replace(ConnectCluster connectCluster) {
        CONNECT_CLUSTER_MAP.put(connectCluster.getId(), connectCluster);
    }

    /**
     * Returns the LIVE backing map — mutations affect the cache directly.
     * NOTE(review): callers rely on this being the live ConcurrentHashMap
     * (safe to remove during iteration), so it is not wrapped unmodifiable.
     */
    public static Map<Long, ConnectCluster> listAll() {
        return CONNECT_CLUSTER_MAP;
    }
}

View File

@@ -0,0 +1,104 @@
package com.xiaojukeji.know.streaming.km.persistence.connect.schedule;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ConnectClusterLoadChangedEvent;
import com.xiaojukeji.know.streaming.km.common.component.SpringTool;
import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache;
import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectClusterDAO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author wyb
* @date 2022/11/7
*/
@Component
public class ScheduleFlushConnectClusterTask {
    private static final ILog log = LogFactory.getLog(ScheduleFlushConnectClusterTask.class);

    @Autowired
    private ConnectClusterDAO connectClusterDAO;

    /** Bounded so a slow/stuck listener cannot exhaust memory. */
    private final BlockingQueue<ConnectClusterLoadChangedEvent> eventQueue = new LinkedBlockingQueue<>(2000);

    private final Thread handleEventThread = new Thread(() -> handleEvent(), "ScheduleFlushConnectClusterTask");

    @PostConstruct
    public void init() {
        // start the event-dispatch thread
        handleEventThread.start();

        // load clusters immediately instead of waiting for the first scheduled tick
        flush();
    }

    /**
     * Every 10s: diff DB connect clusters against the local cache and publish
     * ADD / EDIT / DELETE events for the differences.
     */
    @Scheduled(cron="0/10 * * * * ?")
    public void flush() {
        List<ConnectCluster> inDBConnectClusterList = ConvertUtil.list2List(connectClusterDAO.selectList(null), ConnectCluster.class);

        Map<Long, ConnectCluster> inDBConnectClusterMap = inDBConnectClusterList.stream().collect(Collectors.toMap(ConnectCluster::getId, Function.identity()));

        // detect added or modified clusters
        for (ConnectCluster inDBConnectCluster : inDBConnectClusterList) {
            ConnectCluster inCacheConnectCluster = LoadedConnectClusterCache.getByPhyId(inDBConnectCluster.getId());

            // present in cache: check whether it needs replacing
            if (inCacheConnectCluster != null) {
                if (inCacheConnectCluster.equals(inDBConnectCluster)) {
                    continue;
                }

                // BUGFIX: store the fresh DB version — the old code re-stored the
                // stale cached object, so the cache never updated and an EDIT
                // event re-fired on every tick.
                LoadedConnectClusterCache.replace(inDBConnectCluster);
                this.put2Queue(new ConnectClusterLoadChangedEvent(this, inDBConnectCluster, inCacheConnectCluster, OperationEnum.EDIT));
            } else {
                LoadedConnectClusterCache.replace(inDBConnectCluster);
                this.put2Queue(new ConnectClusterLoadChangedEvent(this, inDBConnectCluster, null, OperationEnum.ADD));
            }
        }

        // detect deleted clusters (safe: listAll returns a ConcurrentHashMap view)
        for (ConnectCluster inCacheConnectCluster : LoadedConnectClusterCache.listAll().values()) {
            if (inDBConnectClusterMap.containsKey(inCacheConnectCluster.getId())) {
                continue;
            }

            LoadedConnectClusterCache.remove(inCacheConnectCluster.getId());
            this.put2Queue(new ConnectClusterLoadChangedEvent(this, null, inCacheConnectCluster, OperationEnum.DELETE));
        }
    }

    private void put2Queue(ConnectClusterLoadChangedEvent event) {
        try {
            eventQueue.put(event);
        } catch (InterruptedException e) {
            // restore the interrupt flag instead of swallowing it
            Thread.currentThread().interrupt();
            log.error("method=put2Queue||event={}||errMsg=exception", event, e);
        }
    }

    private void handleEvent() {
        while (true) {
            try {
                ConnectClusterLoadChangedEvent event = eventQueue.take();
                SpringTool.publish(event);
            } catch (InterruptedException e) {
                // interruption means shutdown: restore the flag and exit the loop
                Thread.currentThread().interrupt();
                log.error("method=handleEvent||errMsg=exception", e);
                return;
            } catch (Exception e) {
                log.error("method=handleEvent||errMsg=exception", e);
            }
        }
    }
}

View File

@@ -26,7 +26,7 @@ public class JmxDAOImpl implements JmxDAO {
public Object getJmxValue(Long clusterPhyId, String jmxHost, Integer jmxPort, JmxConfig jmxConfig, ObjectName objectName, String attribute) {
JmxConnectorWrap jmxConnectorWrap = null;
try {
jmxConnectorWrap = new JmxConnectorWrap(clusterPhyId, null, null, jmxHost, jmxPort, jmxConfig);
jmxConnectorWrap = new JmxConnectorWrap("clusterPhyId: " + clusterPhyId, null, jmxHost, jmxPort, jmxConfig);
if (!jmxConnectorWrap.checkJmxConnectionAndInitIfNeed()) {
log.error(
"method=getJmxValue||clusterPhyId={}||jmxHost={}||jmxPort={}||jmxConfig={}||errMgs=create jmx client failed",

View File

@@ -159,8 +159,7 @@ public class KafkaJMXClient extends AbstractClusterLoadedChangedHandler {
}
JmxConnectorWrap jmxConnectorWrap = new JmxConnectorWrap(
clusterPhy.getId(),
brokerId,
"clusterPhyId: " + clusterPhy.getId() + " brokerId: " + brokerId,
broker.getStartTimestamp(),
jmxConfig != null ? broker.getJmxHost(jmxConfig.getUseWhichEndpoint()) : broker.getHost(),
broker.getJmxPort() != null ? broker.getJmxPort() : jmxConfig.getJmxPort(),

View File

@@ -0,0 +1,9 @@
package com.xiaojukeji.know.streaming.km.persistence.mysql.connect;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectClusterPO;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@link ConnectClusterPO} rows; all CRUD is
 * inherited from {@link BaseMapper}.
 */
@Repository
public interface ConnectClusterDAO extends BaseMapper<ConnectClusterPO> {
}

View File

@@ -0,0 +1,9 @@
package com.xiaojukeji.know.streaming.km.persistence.mysql.connect;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectWorkerPO;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@link ConnectWorkerPO} rows; all CRUD is
 * inherited from {@link BaseMapper}.
 */
@Repository
public interface ConnectWorkerDAO extends BaseMapper<ConnectWorkerPO> {
}

View File

@@ -0,0 +1,9 @@
package com.xiaojukeji.know.streaming.km.persistence.mysql.connect;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@link ConnectorPO} rows; all CRUD is
 * inherited from {@link BaseMapper}.
 */
@Repository
public interface ConnectorDAO extends BaseMapper<ConnectorPO> {
}

View File

@@ -0,0 +1,9 @@
package com.xiaojukeji.know.streaming.km.persistence.mysql.connect;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.WorkerConnectorPO;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@link WorkerConnectorPO} rows; all CRUD is
 * inherited from {@link BaseMapper}.
 */
@Repository
public interface WorkerConnectorDAO extends BaseMapper<WorkerConnectorPO> {
}