Merge the 3.3.0 branch

zengqiao
2023-02-24 17:13:50 +08:00
616 changed files with 32894 additions and 8421 deletions

View File

@@ -7,8 +7,9 @@ import com.didiglobal.logi.job.core.job.Job;
import com.didiglobal.logi.job.core.job.JobContext;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
-import com.xiaojukeji.know.streaming.km.common.bean.entity.EntifyIdInterface;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.EntityIdInterface;
import com.xiaojukeji.know.streaming.km.common.exception.AdminTaskCodeException;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import javax.annotation.PostConstruct;
@@ -16,7 +17,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-public abstract class AbstractDispatchTask<E extends Comparable & EntifyIdInterface> implements Job {
+public abstract class AbstractDispatchTask<E extends Comparable & EntityIdInterface> implements Job {
private static final ILog LOGGER = LogFactory.getLog(AbstractDispatchTask.class);
/**
@@ -82,7 +83,15 @@ public abstract class AbstractDispatchTask<E extends Comparable & EntifyIdInterf
}
// Process the task
-return this.processTask(subTaskList, triggerTimeUnitMs);
+TaskResult ret = this.processTask(subTaskList, triggerTimeUnitMs);
+// Assemble the result info
+TaskResult taskResult = new TaskResult();
+taskResult.setCode(ret.getCode());
+taskResult.setMessage(ConvertUtil.list2String(subTaskList, ","));
+return taskResult;
} catch (Exception e) {
LOGGER.error("process task failed, taskName:{}", taskName, e);

View File

@@ -1,60 +0,0 @@
package com.xiaojukeji.know.streaming.km.task.client;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.job.core.job.Job;
import com.didiglobal.logi.job.core.job.JobContext;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaJMXClient;
import org.springframework.beans.factory.annotation.Autowired;
@Task(name = "CheckJmxClientTask",
description = "检查Jmx客户端",
cron = "0 0/1 * * * ? *",
autoRegister = true,
timeout = 2 * 60,
consensual = ConsensualEnum.BROADCAST)
public class CheckJmxClientTask implements Job {
private static final ILog log = LogFactory.getLog(CheckJmxClientTask.class);
@Autowired
private BrokerService brokerService;
@Autowired
private KafkaJMXClient kafkaJMXClient;
@Override
public TaskResult execute(JobContext jobContext) {
boolean status = true;
for (ClusterPhy clusterPhy: LoadedClusterPhyCache.listAll().values()) {
if (this.checkJmxClient(clusterPhy)) {
continue;
}
status = false;
}
return status? TaskResult.SUCCESS: TaskResult.FAIL;
}
private boolean checkJmxClient(ClusterPhy clusterPhy) {
try {
kafkaJMXClient.checkAndRemoveIfIllegal(
clusterPhy.getId(),
brokerService.listAliveBrokersFromDB(clusterPhy.getId())
);
return true;
} catch (Exception e) {
log.error("method=checkJmxClient||clusterPhyId={}||errMsg=exception", clusterPhy.getId(), e);
}
return false;
}
}

View File

@@ -0,0 +1,51 @@
package com.xiaojukeji.know.streaming.km.task.connect;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import com.xiaojukeji.know.streaming.km.task.AbstractDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
/**
* @author wyb
* @date 2022/11/7
*/
public abstract class AbstractConnectClusterDispatchTask extends AbstractDispatchTask<ConnectCluster> {
private static final ILog log = LogFactory.getLog(AbstractConnectClusterDispatchTask.class);
@Autowired
private ConnectClusterService connectClusterService;
protected abstract TaskResult processSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception;
@Override
protected List<ConnectCluster> listAllTasks() {
return connectClusterService.listAllClusters();
}
@Override
protected TaskResult processTask(List<ConnectCluster> subTaskList, long triggerTimeUnitMs) {
boolean allSuccess = true;
for (ConnectCluster elem : subTaskList) {
try {
log.debug("method=processTask||taskName={}||connectClusterId={}||msg=start", this.taskName, elem.getId());
TaskResult tr = this.processSubTask(elem, triggerTimeUnitMs);
if (TaskResult.SUCCESS.getCode() != tr.getCode()) {
log.warn("method=processTask||taskName={}||connectClusterId={}||msg=process failed", this.taskName, elem.getId());
allSuccess = false;
continue;
}
log.debug("method=processTask||taskName={}||connectClusterId={}||msg=finished", this.taskName, elem.getId());
} catch (Exception e) {
log.error("method=processTask||taskName={}||connectClusterId={}||errMsg=throw exception", this.taskName, elem.getId(), e);
}
}
return allSuccess ? TaskResult.SUCCESS : TaskResult.FAIL;
}
}
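AbstractConnectClusterDispatchTask is a small template: listAllTasks() feeds every Connect cluster to the dispatcher, and a concrete task only implements processSubTask for a single cluster, with the per-cluster logging and the overall SUCCESS/FAIL roll-up handled here. A hypothetical minimal subclass showing the contract (class name and body invented for illustration):

import com.didiglobal.logi.job.common.TaskResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.task.connect.AbstractConnectClusterDispatchTask;

// Hypothetical example; not part of this commit. A real task would also carry
// the @Task annotation, as in the concrete classes below.
public class PingConnectClusterTask extends AbstractConnectClusterDispatchTask {
    @Override
    protected TaskResult processSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
        // Per-cluster work goes here; returning FAIL makes the dispatcher log a warning
        // and flip the overall result to FAIL while still processing the other clusters.
        return TaskResult.SUCCESS;
    }
}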

View File

@@ -0,0 +1,124 @@
package com.xiaojukeji.know.streaming.km.task.connect.health;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.service.CollectThreadPoolService;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.BaseClusterHealthConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterParam;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService;
import com.xiaojukeji.know.streaming.km.core.service.health.checkresult.HealthCheckResultService;
import com.xiaojukeji.know.streaming.km.task.connect.metrics.AbstractAsyncMetricsDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.*;
/**
* @author wyb
* @date 2022/11/8
*/
public abstract class AbstractHealthCheckTask extends AbstractAsyncMetricsDispatchTask {
private static final ILog log = LogFactory.getLog(AbstractHealthCheckTask.class);
@Autowired
private HealthCheckResultService healthCheckResultService;
@Autowired
private CollectThreadPoolService collectThreadPoolService;
public abstract AbstractHealthCheckService getCheckService();
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
return this.calAndUpdateHealthCheckResult(connectCluster, triggerTimeUnitMs);
}
private TaskResult calAndUpdateHealthCheckResult(ConnectCluster connectCluster, long triggerTimeUnitMs){
// Fetch the configs: <config name, config info>
Map<String, BaseClusterHealthConfig> healthConfigMap = healthCheckResultService.getClusterHealthConfig(connectCluster.getId());
// Fetch the resource list
List<ClusterParam> paramList = this.getCheckService().getResList(connectCluster.getId());
// Check results
List<HealthCheckResult> checkResultList = Collections.synchronizedList(new ArrayList<>());
if (ValidateUtils.isEmptyList(paramList)) {
// No resources of this dimension, so record the results directly as passed
checkResultList.addAll(this.getNoResResult(connectCluster.getId(), this.getCheckService(), healthConfigMap));
}
// Select a suitable thread pool
FutureWaitUtil<Void> futureWaitUtil = collectThreadPoolService.selectSuitableFutureUtil(connectCluster.getId());
// Iterate over the resources
for (ClusterParam clusterParam: paramList) {
futureWaitUtil.runnableTask(
String.format("class=%s||method=calAndUpdateHealthCheckResult||clusterId=%d||resData=%s", this.getCheckService().getClass().getSimpleName(), connectCluster.getId(), clusterParam),
30000,
() -> checkResultList.addAll(this.checkAndGetResult(clusterParam, healthConfigMap))
);
}
futureWaitUtil.waitExecute(30000);
try {
healthCheckResultService.batchReplace(connectCluster.getId(), this.getCheckService().getHealthCheckDimensionEnum().getDimension(), checkResultList);
} catch (Exception e) {
log.error(
"class={}||method=calAndUpdateHealthCheckResult||clusterId={}||errMsg=exception!",
this.getCheckService().getClass().getSimpleName(), connectCluster.getId(), e
);
}
return TaskResult.SUCCESS;
}
private List<HealthCheckResult> getNoResResult(Long connectClusterId, AbstractHealthCheckService healthCheckService, Map<String, BaseClusterHealthConfig> healthConfigMap) {
List<HealthCheckResult> resultList = new ArrayList<>();
// Run the checks
for (BaseClusterHealthConfig clusterHealthConfig: healthConfigMap.values()) {
HealthCheckDimensionEnum dimensionEnum = healthCheckService.getHealthCheckDimensionEnum();
if (!clusterHealthConfig.getCheckNameEnum().getDimensionEnum().equals(dimensionEnum)) {
// Dimension type does not match
continue;
}
// Record the result
HealthCheckResult checkResult = new HealthCheckResult(
dimensionEnum.getDimension(),
clusterHealthConfig.getCheckNameEnum().getConfigName(),
connectClusterId,
"-1"
);
checkResult.setPassed(Constant.YES);
resultList.add(checkResult);
}
return resultList;
}
private List<HealthCheckResult> checkAndGetResult(ClusterParam clusterParam,
Map<String, BaseClusterHealthConfig> healthConfigMap) {
List<HealthCheckResult> resultList = new ArrayList<>();
// Run the checks
for (BaseClusterHealthConfig clusterHealthConfig: healthConfigMap.values()) {
HealthCheckResult healthCheckResult = this.getCheckService().checkAndGetResult(clusterParam, clusterHealthConfig);
if (healthCheckResult == null) {
continue;
}
// Record the result
resultList.add(healthCheckResult);
}
return resultList;
}
}
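calAndUpdateHealthCheckResult fans each resource's checks out to a thread pool through FutureWaitUtil, blocks up to 30 s for completion, then persists the collected results in one batchReplace. A rough JDK-only equivalent of that fan-out/await pattern, for orientation (a sketch under the assumption that FutureWaitUtil wraps an executor; this is not the project's implementation):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

public final class FanOutSketch {
    // Run check(param) for every param on a pool and wait up to timeoutMs overall.
    public static <P, R> List<R> runAll(List<P> params, Function<P, List<R>> check, long timeoutMs) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        List<R> results = Collections.synchronizedList(new ArrayList<>());
        List<Future<?>> futures = new ArrayList<>();
        for (P param : params) {
            futures.add(pool.submit(() -> results.addAll(check.apply(param))));
        }
        long deadline = System.currentTimeMillis() + timeoutMs;
        for (Future<?> f : futures) {
            try {
                f.get(Math.max(1L, deadline - System.currentTimeMillis()), TimeUnit.MILLISECONDS);
            } catch (Exception ignored) {
                // one slow or failing check must not block persisting the others
            }
        }
        pool.shutdown();
        return results;
    }
}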

View File

@@ -0,0 +1,32 @@
package com.xiaojukeji.know.streaming.km.task.connect.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.connect.HealthCheckConnectClusterService;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyc
* @date 2022/11/9
*/
@NoArgsConstructor
@AllArgsConstructor
@Task(name = "ConnectClusterHealthCheckTask",
description = "ConnectCluster健康检查",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ConnectClusterHealthCheckTask extends AbstractHealthCheckTask {
@Autowired
private HealthCheckConnectClusterService healthCheckConnectClusterService;
@Override
public AbstractHealthCheckService getCheckService() {
return healthCheckConnectClusterService;
}
}

View File

@@ -0,0 +1,32 @@
package com.xiaojukeji.know.streaming.km.task.connect.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.connect.HealthCheckConnectorService;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyb
* @date 2022/11/8
*/
@NoArgsConstructor
@AllArgsConstructor
@Task(name = "ConnectorHealthCheckTask",
description = "Connector健康检查",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ConnectorHealthCheckTask extends AbstractHealthCheckTask {
@Autowired
HealthCheckConnectorService healthCheckConnectorService;
@Override
public AbstractHealthCheckService getCheckService() {
return healthCheckConnectorService;
}
}

View File

@@ -0,0 +1,47 @@
package com.xiaojukeji.know.streaming.km.task.connect.metadata;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.task.connect.AbstractConnectClusterDispatchTask;
import com.xiaojukeji.know.streaming.km.task.service.TaskThreadPoolService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Metadata synchronization tasks
*/
public abstract class AbstractAsyncMetadataDispatchTask extends AbstractConnectClusterDispatchTask {
private static final ILog log = LogFactory.getLog(AbstractAsyncMetadataDispatchTask.class);
public abstract TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception;
@Autowired
private TaskThreadPoolService taskThreadPoolService;
@Override
protected TaskResult processSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
return this.asyncProcessSubTask(connectCluster, triggerTimeUnitMs);
}
public TaskResult asyncProcessSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) {
taskThreadPoolService.submitMetadataTask(
String.format("taskName=%s||clusterPhyId=%d", this.taskName, connectCluster.getId()),
this.timeoutUnitSec.intValue() * 1000,
() -> {
try {
TaskResult tr = this.processClusterTask(connectCluster, triggerTimeUnitMs);
if (TaskResult.SUCCESS_CODE != tr.getCode()) {
log.error("class=AbstractAsyncMetadataDispatchTask||taskName={}||connectClusterId={}||taskResult={}||msg=failed", this.taskName, connectCluster.getId(), tr);
} else {
log.debug("class=AbstractAsyncMetadataDispatchTask||taskName={}||connectClusterId={}||msg=success", this.taskName, connectCluster.getId());
}
} catch (Exception e) {
log.error("class=AbstractAsyncMetadataDispatchTask||taskName={}||connectClusterId={}||errMsg=exception", this.taskName, connectCluster.getId(), e);
}
}
);
return TaskResult.SUCCESS;
}
}
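Note the semantics here: asyncProcessSubTask returns TaskResult.SUCCESS as soon as the work is queued, so the dispatcher's SUCCESS/FAIL roll-up reflects submission, not execution; real failures only surface in the logs written inside the lambda. The shape of the pool service used above, inferred from its call sites in this commit (a hypothetical interface, not the actual declaration):

// Inferred from calls such as submitMetadataTask(name, timeoutMs, runnable); hypothetical.
public interface TaskThreadPoolServiceSketch {
    void submitMetadataTask(String taskKey, int timeoutMs, Runnable work);
    void submitMetricsTask(String taskKey, int timeoutMs, Runnable work);
}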

View File

@@ -0,0 +1,62 @@
package com.xiaojukeji.know.streaming.km.task.connect.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@Task(name = "SyncConnectorTask",
description = "Connector信息同步到DB",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class SyncConnectorTask extends AbstractAsyncMetadataDispatchTask {
private static final ILog LOGGER = LogFactory.getLog(SyncConnectorTask.class);
@Autowired
private ConnectorService connectorService;
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) {
Result<List<String>> nameListResult = connectorService.listConnectorsFromCluster(connectCluster.getId());
if (nameListResult.failed()) {
return TaskResult.FAIL;
}
boolean allSuccess = true;
List<KSConnector> connectorList = new ArrayList<>();
for (String connectorName: nameListResult.getData()) {
Result<KSConnector> ksConnectorResult = connectorService.getAllConnectorInfoFromCluster(connectCluster.getId(), connectorName);
if (ksConnectorResult.failed()) {
LOGGER.error(
"method=processClusterTask||connectClusterId={}||connectorName={}||result={}",
connectCluster.getId(), connectorName, ksConnectorResult
);
allSuccess = false;
continue;
}
connectorList.add(ksConnectorResult.getData());
}
// Add MirrorMaker 2 (mm2) related info
connectorService.completeMirrorMakerInfo(connectCluster, connectorList);
connectorService.batchReplace(connectCluster.getKafkaClusterPhyId(), connectCluster.getId(), connectorList, new HashSet<>(nameListResult.getData()));
return allSuccess? TaskResult.SUCCESS: TaskResult.FAIL;
}
}
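processClusterTask passes batchReplace both the freshly fetched connectors and the full set of names reported by the cluster, which points to a reconcile step: upsert what was fetched and use the name set to detect rows that no longer exist. A sketch of that reconciliation idea (the row type and field are invented; this is not the service's actual code):

import java.util.List;
import java.util.Set;

public final class ReconcileSketch {
    public static class Row { String name; }   // hypothetical persisted row

    // Delete rows whose name no longer appears in the cluster, then upsert the rest.
    public static void reconcile(List<Row> fetched, Set<String> liveNames, List<Row> inDb) {
        for (Row row : inDb) {
            if (!liveNames.contains(row.name)) {
                // delete the stale row here
            }
        }
        for (Row row : fetched) {
            // insert-or-update the row here
        }
    }
}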

View File

@@ -0,0 +1,83 @@
package com.xiaojukeji.know.streaming.km.task.connect.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.WorkerConnector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription;
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
/**
* @author wyb
* @date 2022/11/14
*/
@Task(name = "SyncWorkerConnectorTask",
description = "WorkerConnector信息同步到DB",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class SyncWorkerConnectorTask extends AbstractAsyncMetadataDispatchTask{
private static final ILog LOGGER = LogFactory.getLog(SyncWorkerConnectorTask.class);
@Autowired
ConnectorService connectorService;
@Autowired
private GroupService groupService;
@Autowired
private WorkerConnectorService workerConnectorService;
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
List<ConnectorPO> connectorPOList = connectorService.listByConnectClusterIdFromDB(connectCluster.getId());
if (connectorPOList.isEmpty()) {
return TaskResult.SUCCESS;
}
// Fetch the cluster info
ClusterPhy clusterPhy = LoadedClusterPhyCache.getByPhyId(connectCluster.getKafkaClusterPhyId());
if (clusterPhy == null) {
LOGGER.error(
"class=SyncWorkerConnectorTask||method=processClusterTask||connectClusterId={}||clusterPhyId={}||errMsg=clusterPhy not exist!.",
connectCluster.getId(), connectCluster.getKafkaClusterPhyId()
);
return TaskResult.FAIL;
}
KSGroupDescription ksGroupDescription = groupService.getGroupDescriptionFromKafka(clusterPhy, connectCluster.getGroupName());
// Fetch the WorkerConnector list
try {
List<WorkerConnector> workerConnectorList = new ArrayList<>();
for (ConnectorPO connectorPO : connectorPOList) {
workerConnectorList.addAll(workerConnectorService.getWorkerConnectorListFromCluster(connectCluster, connectorPO.getConnectorName()));
}
workerConnectorService.batchReplaceInDB(connectCluster.getId(), workerConnectorList);
} catch (Exception e) {
LOGGER.error(
"class=SyncWorkerConnectorTask||method=processClusterTask||connectClusterId={}||ksGroupDescription={}||errMsg=exception.",
connectCluster.getId(), ksGroupDescription, e
);
return TaskResult.FAIL;
}
return TaskResult.SUCCESS;
}
}

View File

@@ -0,0 +1,47 @@
package com.xiaojukeji.know.streaming.km.task.connect.metrics;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.task.connect.AbstractConnectClusterDispatchTask;
import com.xiaojukeji.know.streaming.km.task.service.TaskThreadPoolService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Metrics collection tasks
*/
public abstract class AbstractAsyncMetricsDispatchTask extends AbstractConnectClusterDispatchTask {
private static final ILog log = LogFactory.getLog(AbstractAsyncMetricsDispatchTask.class);
public abstract TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception;
@Autowired
private TaskThreadPoolService taskThreadPoolService;
@Override
protected TaskResult processSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
return this.asyncProcessSubTask(connectCluster, triggerTimeUnitMs);
}
public TaskResult asyncProcessSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) {
taskThreadPoolService.submitMetricsTask(
String.format("taskName=%s||clusterPhyId=%d", this.taskName, connectCluster.getId()),
this.timeoutUnitSec.intValue() * 1000,
() -> {
try {
TaskResult tr = this.processClusterTask(connectCluster, triggerTimeUnitMs);
if (TaskResult.SUCCESS_CODE != tr.getCode()) {
log.error("class=AbstractAsyncMetricsDispatchTask||taskName={}||connectClusterId={}||taskResult={}||msg=failed", this.taskName, connectCluster.getId(), tr);
} else {
log.debug("class=AbstractAsyncMetricsDispatchTask||taskName={}||connectClusterId={}||msg=success", this.taskName, connectCluster.getId());
}
} catch (Exception e) {
log.error("class=AbstractAsyncMetricsDispatchTask||taskName={}||connectClusterId={}||errMsg=exception", this.taskName, connectCluster.getId(), e);
}
}
);
return TaskResult.SUCCESS;
}
}
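This class mirrors AbstractAsyncMetadataDispatchTask almost line for line; the only difference is submitMetricsTask versus submitMetadataTask, which keeps metrics collection and metadata sync on separate pools so a slow collector cannot starve the sync tasks. If the duplication ever becomes a burden it could be factored as below (a refactor sketch, not part of this commit):

// Hypothetical shared base: subclasses choose only the target pool.
public abstract class AbstractAsyncDispatchTaskSketch extends AbstractConnectClusterDispatchTask {
    protected abstract void submit(String taskKey, int timeoutMs, Runnable work);

    @Override
    protected TaskResult processSubTask(ConnectCluster connectCluster, long triggerTimeUnitMs) {
        submit(
                String.format("taskName=%s||clusterPhyId=%d", this.taskName, connectCluster.getId()),
                this.timeoutUnitSec.intValue() * 1000,
                () -> { /* run processClusterTask and log, exactly as above */ }
        );
        return TaskResult.SUCCESS;
    }
}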

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.know.streaming.km.task.connect.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.collector.metric.connect.ConnectClusterMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyb
* @date 2022/11/7
*/
@Task(name = "ConnectClusterMetricCollectorTask",
description = "ConnectCluster指标采集任务",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ConnectClusterMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
@Autowired
private ConnectClusterMetricCollector connectClusterMetricCollector;
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
connectClusterMetricCollector.collectMetrics(connectCluster);
return TaskResult.SUCCESS;
}
}

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.know.streaming.km.task.connect.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.collector.metric.connect.ConnectConnectorMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyb
* @date 2022/11/7
*/
@Task(name = "ConnectorMetricCollectorTask",
description = "Connector指标采集任务",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ConnectorMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
@Autowired
private ConnectConnectorMetricCollector connectConnectorMetricCollector;
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
connectConnectorMetricCollector.collectMetrics(connectCluster);
return TaskResult.SUCCESS;
}
}

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.know.streaming.km.task.connect.mm2.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.connect.mm2.HealthCheckMirrorMakerService;
import com.xiaojukeji.know.streaming.km.task.connect.health.AbstractHealthCheckTask;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyb
* @date 2022/12/21
*/
@NoArgsConstructor
@AllArgsConstructor
@Task(name = "MirrorMakerHealthCheckTask",
description = "MirrorMaker健康检查",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class MirrorMakerHealthCheckTask extends AbstractHealthCheckTask {
@Autowired
private HealthCheckMirrorMakerService healthCheckMirrorMakerService;
@Override
public AbstractHealthCheckService getCheckService() {
return healthCheckMirrorMakerService;
}
}

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.know.streaming.km.task.connect.mm2.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.collector.metric.connect.mm2.MirrorMakerMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.task.connect.metrics.AbstractAsyncMetricsDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author wyb
* @date 2022/12/21
*/
@Task(name = "MirrorMakerCollectorTask",
description = "MirrorMaker指标采集任务",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class MirrorMakerCollectorTask extends AbstractAsyncMetricsDispatchTask {
@Autowired
private MirrorMakerMetricCollector mirrorMakerMetricCollector;
@Override
public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) throws Exception {
mirrorMakerMetricCollector.collectConnectMetrics(connectCluster);
return TaskResult.SUCCESS;
}
}

View File

@@ -9,7 +9,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.core.enterprise.rebalance.service.ClusterBalanceJobService;
import com.xiaojukeji.know.streaming.km.core.enterprise.rebalance.service.ClusterBalanceReassignService;
import com.xiaojukeji.know.streaming.km.core.enterprise.rebalance.service.ClusterBalanceService;
-import com.xiaojukeji.know.streaming.km.task.AbstractAsyncCommonDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.AbstractAsyncCommonDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
@EnterpriseLoadReBalance

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task;
+package com.xiaojukeji.know.streaming.km.task.kafka;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
@@ -31,12 +31,12 @@ public abstract class AbstractAsyncCommonDispatchTask extends AbstractClusterPhy
try {
TaskResult tr = this.processClusterTask(clusterPhy, triggerTimeUnitMs);
if (TaskResult.SUCCESS_CODE != tr.getCode()) {
log.error("class=AbstractAsyncCommonDispatchTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
} else {
log.debug("class=AbstractAsyncCommonDispatchTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
log.debug("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
}
} catch (Exception e) {
log.error("class=AbstractAsyncCommonDispatchTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
}
}
);

View File

@@ -1,10 +1,11 @@
-package com.xiaojukeji.know.streaming.km.task;
+package com.xiaojukeji.know.streaming.km.task.kafka;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
+import com.xiaojukeji.know.streaming.km.task.AbstractDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;

View File

@@ -1,31 +1,33 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.service.CollectThreadPoolService;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.BaseClusterHealthConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckResult;
-import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterParam;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService;
import com.xiaojukeji.know.streaming.km.core.service.health.checkresult.HealthCheckResultService;
-import com.xiaojukeji.know.streaming.km.task.metrics.AbstractAsyncMetricsDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.metrics.AbstractAsyncMetricsDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
public abstract class AbstractHealthCheckTask extends AbstractAsyncMetricsDispatchTask {
-private static final ILog log = LogFactory.getLog(AbstractHealthCheckTask.class);
+private static final ILog LOGGER = LogFactory.getLog(AbstractHealthCheckTask.class);
@Autowired
private HealthCheckResultService healthCheckResultService;
@Autowired
private CollectThreadPoolService collectThreadPoolService;
public abstract AbstractHealthCheckService getCheckService();
@Override
@@ -37,32 +39,37 @@ public abstract class AbstractHealthCheckTask extends AbstractAsyncMetricsDispat
// Fetch the configs: <config name, config info>
Map<String, BaseClusterHealthConfig> healthConfigMap = healthCheckResultService.getClusterHealthConfig(clusterPhy.getId());
-// Check results
-List<HealthCheckResult> resultList = new ArrayList<>();
+// Fetch the resource list
+List<ClusterParam> paramList = this.getCheckService().getResList(clusterPhy.getId());
-// Iterate over the Check-Services
-List<ClusterPhyParam> paramList = this.getCheckService().getResList(clusterPhy.getId());
+// Check results
+List<HealthCheckResult> checkResultList = Collections.synchronizedList(new ArrayList<>());
if (ValidateUtils.isEmptyList(paramList)) {
// No resources of this dimension, so record the results directly as passed
-resultList.addAll(this.getNoResResult(clusterPhy.getId(), this.getCheckService(), healthConfigMap));
+checkResultList.addAll(this.getNoResResult(clusterPhy.getId(), this.getCheckService(), healthConfigMap));
}
+// Select a suitable thread pool
+FutureWaitUtil<Void> futureWaitUtil = collectThreadPoolService.selectSuitableFutureUtil(clusterPhy.getId() * 100 + this.getCheckService().getHealthCheckDimensionEnum().getDimension());
+// Iterate over the resources
-for (ClusterPhyParam clusterPhyParam: paramList) {
-resultList.addAll(this.checkAndGetResult(clusterPhyParam, healthConfigMap));
+for (ClusterParam clusterParam: paramList) {
+futureWaitUtil.runnableTask(
+String.format("class=%s||method=calAndUpdateHealthCheckResult||clusterId=%d", this.getCheckService().getClass().getSimpleName(), clusterPhy.getId()),
+30000,
+() -> checkResultList.addAll(this.checkAndGetResult(clusterParam, healthConfigMap))
+);
}
-try {
-healthCheckResultService.batchReplace(clusterPhy.getId(), resultList);
-} catch (Exception e) {
-log.error("class=AbstractHealthCheckTask||method=processSubTask||clusterPhyId={}||errMsg=exception!", clusterPhy.getId(), e);
-}
+futureWaitUtil.waitExecute(30000);
-// Delete the check results from 10 minutes ago
try {
-healthCheckResultService.deleteByUpdateTimeBeforeInDB(clusterPhy.getId(), new Date(triggerTimeUnitMs - 20 * 60 * 1000));
+healthCheckResultService.batchReplace(clusterPhy.getId(), this.getCheckService().getHealthCheckDimensionEnum().getDimension(), checkResultList);
} catch (Exception e) {
log.error("class=AbstractHealthCheckTask||method=processSubTask||clusterPhyId={}||errMsg=exception!", clusterPhy.getId(), e);
LOGGER.error(
"extendClass={}||method=calAndUpdateHealthCheckResult||clusterPhyId={}||errMsg=exception!",
this.getCheckService().getClass().getSimpleName(), clusterPhy.getId(), e
);
}
return TaskResult.SUCCESS;
@@ -93,13 +100,13 @@ public abstract class AbstractHealthCheckTask extends AbstractAsyncMetricsDispat
return resultList;
}
-private List<HealthCheckResult> checkAndGetResult(ClusterPhyParam clusterPhyParam,
+private List<HealthCheckResult> checkAndGetResult(ClusterParam clusterParam,
Map<String, BaseClusterHealthConfig> healthConfigMap) {
List<HealthCheckResult> resultList = new ArrayList<>();
// Run the checks
for (BaseClusterHealthConfig clusterHealthConfig: healthConfigMap.values()) {
-HealthCheckResult healthCheckResult = this.getCheckService().checkAndGetResult(clusterPhyParam, clusterHealthConfig);
+HealthCheckResult healthCheckResult = this.getCheckService().checkAndGetResult(clusterParam, clusterHealthConfig);
if (healthCheckResult == null) {
continue;
}

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
@@ -12,7 +12,7 @@ import org.springframework.beans.factory.annotation.Autowired;
@AllArgsConstructor
@Task(name = "TopicHealthCheckTask",
description = "Topic健康检查",
cron = "0 0/1 * * * ? *",
cron = "30 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
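Moving the cron from second 0 to second 30 staggers this check against the tasks that still fire at second 0; later hunks in this commit apply the same idea to the Topic/Broker/Cluster/Group metric collectors (seconds 10/20/30/40), spreading JMX and DB load across the minute. For reference, the Quartz cron fields used throughout these annotations:

// Quartz cron fields:  sec  min  hour  day-of-month  month  day-of-week  year
// "0 0/1 * * * ? *"  -> at second 0 of every minute ("0/1" = every minute, starting at minute 0)
// "30 0/1 * * * ? *" -> at second 30 of every minute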

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.health;
+package com.xiaojukeji.know.streaming.km.task.kafka.health;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.job;
+package com.xiaojukeji.know.streaming.km.task.kafka.job;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
@@ -8,7 +8,7 @@ import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.core.service.reassign.ReassignJobService;
-import com.xiaojukeji.know.streaming.km.task.AbstractAsyncCommonDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.AbstractAsyncCommonDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
@Task(name = "CommunityReassignJobTask",
@@ -35,7 +35,7 @@ public class CommunityReassignJobTask extends AbstractAsyncCommonDispatchTask {
// Update the job status
Result<Void> rv = reassignJobService.verifyAndUpdateStatue(jobId);
if (rv != null && rv.failed()) {
log.error("class=CommunityReassignJobTask||method=processSubTask||jobId={}||result={}||msg=verify and update task status failed", jobId, rv);
log.error("method=processSubTask||jobId={}||result={}||msg=verify and update task status failed", jobId, rv);
}
// Update the sync progress info

View File

@@ -1,11 +1,11 @@
-package com.xiaojukeji.know.streaming.km.task.job;
+package com.xiaojukeji.know.streaming.km.task.kafka.job;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.core.service.job.JobService;
-import com.xiaojukeji.know.streaming.km.task.AbstractAsyncCommonDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.AbstractAsyncCommonDispatchTask;
import org.springframework.beans.factory.annotation.Autowired;
@Task(name = "kmJobTask",

View File

@@ -1,10 +1,10 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
-import com.xiaojukeji.know.streaming.km.task.AbstractClusterPhyDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.AbstractClusterPhyDispatchTask;
import com.xiaojukeji.know.streaming.km.task.service.TaskThreadPoolService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -32,12 +32,12 @@ public abstract class AbstractAsyncMetadataDispatchTask extends AbstractClusterP
try {
TaskResult tr = this.processClusterTask(clusterPhy, triggerTimeUnitMs);
if (TaskResult.SUCCESS_CODE != tr.getCode()) {
log.error("class=AbstractAsyncMetadataDispatchTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
} else {
log.debug("class=AbstractAsyncMetadataDispatchTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
log.debug("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
}
} catch (Exception e) {
log.error("class=AbstractAsyncMetadataDispatchTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
}
}
);

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -0,0 +1,166 @@
package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectClusterMetadata;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker;
import com.xiaojukeji.know.streaming.km.common.bean.entity.group.Group;
import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription;
import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSMemberConnectAssignment;
import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSMemberDescription;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum;
import com.xiaojukeji.know.streaming.km.common.enums.group.GroupTypeEnum;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import static com.xiaojukeji.know.streaming.km.common.enums.group.GroupTypeEnum.CONNECT_CLUSTER_PROTOCOL_TYPE;
@Task(name = "SyncClusterAndWorkerTask",
description = "Connect-Cluster&Worker信息同步到DB",
cron = "0 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class SyncConnectClusterAndWorkerTask extends AbstractAsyncMetadataDispatchTask {
private static final ILog LOGGER = LogFactory.getLog(SyncConnectClusterAndWorkerTask.class);
@Autowired
private GroupService groupService;
@Autowired
private WorkerService workerService;
@Autowired
private WorkerConnectorService workerConnectorService;
@Autowired
private ConnectClusterService connectClusterService;
@Override
public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) {
boolean allSuccess = true;
// Fetch the Connect clusters
List<Group> groupList = groupService.listClusterGroups(clusterPhy.getId()).stream().filter(elem->elem.getType()==GroupTypeEnum.CONNECT_CLUSTER).collect(Collectors.toList());
for (Group group: groupList) {
try {
KSGroupDescription ksGroupDescription = groupService.getGroupDescriptionFromKafka(clusterPhy, group.getName());
if (!ksGroupDescription.protocolType().equals(CONNECT_CLUSTER_PROTOCOL_TYPE)) {
continue;
}
Result<Long> rl = this.handleConnectClusterMetadata(clusterPhy.getId(), group.getName(), ksGroupDescription);
if (rl.failed()) {
allSuccess = false;
continue;
}
Result<Void> rv = this.handleWorkerMetadata(rl.getData(), ksGroupDescription);
if (rv.failed()) {
allSuccess = false;
}
} catch (Exception e) {
LOGGER.error(
"class=SyncClusterAndWorkerTask||method=processClusterTask||clusterPhyId={}||groupName={}||errMsg=exception.",
clusterPhy.getId(), group.getName(), e
);
allSuccess = false;
}
}
return allSuccess? TaskResult.SUCCESS: TaskResult.FAIL;
}
private Result<Void> handleWorkerMetadata(Long connectClusterId, KSGroupDescription ksGroupDescription) {
try {
List<ConnectWorker> workerList = new ArrayList<>();
ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(connectClusterId);
for (KSMemberDescription memberDescription: ksGroupDescription.members()) {
KSMemberConnectAssignment assignment = (KSMemberConnectAssignment) memberDescription.assignment();
if (assignment != null) {
workerList.add(new ConnectWorker(
connectCluster.getKafkaClusterPhyId(),
connectClusterId,
memberDescription.consumerId(),
memberDescription.host().substring(1),
Constant.INVALID_CODE,
assignment.getWorkerState().url(),
assignment.getAssignment().leaderUrl(),
memberDescription.consumerId().equals(assignment.getAssignment().leader()) ? Constant.YES : Constant.NO
));
} else {
workerList.add(new ConnectWorker(
connectCluster.getKafkaClusterPhyId(),
connectClusterId,
memberDescription.consumerId(),
memberDescription.host().substring(1),
Constant.INVALID_CODE,
"",
"",
Constant.NO
));
}
}
workerService.batchReplaceInDB(connectClusterId, workerList);
} catch (Exception e) {
LOGGER.error(
"class=SyncClusterAndWorkerTask||method=handleWorkerMetadata||connectClusterId={}||ksGroupDescription={}||errMsg=exception.",
connectClusterId, ksGroupDescription, e
);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_OPERATE_FAILED, e.getMessage());
}
return Result.buildSuc();
}
private Result<Long> handleConnectClusterMetadata(Long clusterPhyId, String groupName, KSGroupDescription ksGroupDescription) {
try {
for (KSMemberDescription memberDescription: ksGroupDescription.members()) {
KSMemberConnectAssignment assignment = (KSMemberConnectAssignment) memberDescription.assignment();
ConnectClusterMetadata metadata = new ConnectClusterMetadata(
clusterPhyId,
groupName,
GroupStateEnum.getByRawState(ksGroupDescription.state()),
assignment == null? "": assignment.getAssignment().leaderUrl()
);
Long connectClusterId = connectClusterService.replaceAndReturnIdInDB(metadata);
return Result.buildSuc(connectClusterId);
}
} catch (Exception e) {
LOGGER.error(
"class=SyncClusterAndWorkerTask||method=handleConnectClusterMetadata||clusterPhyId={}||groupName={}||ksGroupDescription={}||errMsg=exception.",
clusterPhyId, groupName, ksGroupDescription, e
);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_OPERATE_FAILED, e.getMessage());
}
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, "the consumer group has no members");
}
}

View File

@@ -1,16 +1,20 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.kafkacontroller.KafkaController;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import com.xiaojukeji.know.streaming.km.core.service.kafkacontroller.KafkaControllerService;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.List;
@Task(name = "SyncControllerTask",
description = "Controller信息同步到DB",
@@ -19,7 +23,10 @@ import org.springframework.beans.factory.annotation.Autowired;
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class SyncControllerTask extends AbstractAsyncMetadataDispatchTask {
-private static final ILog log = LogFactory.getLog(SyncControllerTask.class);
+private static final ILog LOGGER = LogFactory.getLog(SyncControllerTask.class);
@Autowired
private BrokerService brokerService;
@Autowired
private KafkaControllerService kafkaControllerService;
@@ -33,10 +40,30 @@ public class SyncControllerTask extends AbstractAsyncMetadataDispatchTask {
if (controllerResult.getData() == null) {
kafkaControllerService.setNoKafkaController(clusterPhy.getId(), System.currentTimeMillis() / 1000L * 1000L);
-} else {
-kafkaControllerService.insertAndIgnoreDuplicateException(controllerResult.getData());
+return TaskResult.SUCCESS;
}
+Broker controllerBroker = null;
+Result<List<Broker>> brokerListResult = brokerService.listBrokersFromKafka(clusterPhy);
+if (brokerListResult.failed()) {
+LOGGER.error("method=processClusterTask||clusterPhyId={}||result={}||errMsg=list brokers failed", clusterPhy.getId(), brokerListResult);
+} else {
+for (Broker broker: brokerListResult.getData()) {
+if (broker.getBrokerId().equals(controllerResult.getData().getBrokerId())) {
+controllerBroker = broker;
+}
+}
+}
+kafkaControllerService.insertAndIgnoreDuplicateException(
+controllerResult.getData(),
+controllerBroker != null? controllerBroker.getHost(): "",
+controllerBroker != null? controllerBroker.getRack(): ""
+);
return TaskResult.SUCCESS;
}
}
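The new branch enriches the controller record with the host and rack of the matching broker, scanning the freshly fetched broker list for the controller's ID. The same lookup expressed with streams, for readability (a sketch reusing the names from the diff and assuming Broker.getBrokerId() boxes to Integer, per the .equals comparison above; not the committed code):

import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker;
import java.util.List;

public final class ControllerLookupSketch {
    // Returns the broker whose ID matches the controller's broker ID, or null when absent.
    public static Broker findControllerBroker(List<Broker> brokers, Integer controllerBrokerId) {
        return brokers.stream()
                .filter(b -> b.getBrokerId().equals(controllerBrokerId))
                .findFirst()
                .orElse(null);
    }
}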

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
@@ -34,7 +34,7 @@ public class SyncKafkaGroupTask extends AbstractAsyncMetadataDispatchTask {
@Override
public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
// Fetch the cluster's group list
-List<String> groupNameList = groupService.listGroupsFromKafka(clusterPhy.getId());
+List<String> groupNameList = groupService.listGroupsFromKafka(clusterPhy);
TaskResult allSuccess = TaskResult.SUCCESS;
@@ -42,7 +42,7 @@ public class SyncKafkaGroupTask extends AbstractAsyncMetadataDispatchTask {
List<Group> groupList = new ArrayList<>();
for (String groupName : groupNameList) {
try {
-Group group = groupService.getGroupFromKafka(clusterPhy.getId(), groupName);
+Group group = groupService.getGroupFromKafka(clusterPhy, groupName);
if (group == null) {
continue;
}

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
@@ -49,7 +49,7 @@ public class SyncPartitionTask extends AbstractAsyncMetadataDispatchTask {
try {
partitionService.updatePartitions(clusterPhy.getId(), entry.getKey(), entry.getValue(), dbPartitionMap.getOrDefault(entry.getKey(), new ArrayList<>()));
} catch (Exception e) {
log.error("class=SyncPartitionTask||method=processSubTask||clusterPhyId={}||topicName={}||errMsg=exception", clusterPhy.getId(), entry.getKey(), e);
log.error("method=processSubTask||clusterPhyId={}||topicName={}||errMsg=exception", clusterPhy.getId(), entry.getKey(), e);
}
}

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
@@ -7,8 +7,8 @@ import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
-import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.TopicConfig;
+import com.xiaojukeji.know.streaming.km.common.bean.po.topic.TopicPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
@@ -44,18 +44,25 @@ public class SyncTopicConfigTask extends AbstractAsyncMetadataDispatchTask {
public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) {
boolean success = true;
List<TopicConfig> topicConfigList = new ArrayList<>();
-for (Topic topic: topicService.listTopicsFromDB(clusterPhy.getId())) {
-Result<TopicConfig> configResult = this.getTopicConfig(clusterPhy.getId(), topic.getTopicName());
+List<TopicConfig> changedConfigList = new ArrayList<>();
+for (TopicPO topicPO: topicService.listTopicPOsFromDB(clusterPhy.getId())) {
+Result<TopicConfig> configResult = this.getTopicConfig(clusterPhy.getId(), topicPO.getTopicName());
if (configResult.failed()) {
success = false;
continue;
}
-topicConfigList.add(configResult.getData());
+TopicConfig config = configResult.getData();
+if (topicPO.getRetentionMs().equals(config.getRetentionMs())) {
+// No change, so it does not need to go into the pending-update list
+continue;
+}
+config.setId(topicPO.getId());
+changedConfigList.add(configResult.getData());
}
-topicService.batchReplaceConfig(clusterPhy.getId(), topicConfigList);
+topicService.batchReplaceChangedConfig(clusterPhy.getId(), changedConfigList);
return success? TaskResult.SUCCESS: TaskResult.FAIL;
}
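The rewritten loop persists only the configs whose retentionMs actually changed, shrinking the per-minute batch write to the delta. One caveat: topicPO.getRetentionMs().equals(...) throws a NullPointerException if the stored value is null; a null-safe form of the guard would use Objects.equals (a sketch, assuming a nullable Long on either side):

import java.util.Objects;

public final class RetentionGuardSketch {
    // Null-safe version of the change check in the diff above.
    public static boolean unchanged(Long dbRetentionMs, Long kafkaRetentionMs) {
        return Objects.equals(dbRetentionMs, kafkaRetentionMs);
    }
}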

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.task.metadata;
+package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;

View File

@@ -1,10 +1,10 @@
-package com.xiaojukeji.know.streaming.km.task.metrics;
+package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
-import com.xiaojukeji.know.streaming.km.task.AbstractClusterPhyDispatchTask;
+import com.xiaojukeji.know.streaming.km.task.kafka.AbstractClusterPhyDispatchTask;
import com.xiaojukeji.know.streaming.km.task.service.TaskThreadPoolService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -32,12 +32,12 @@ public abstract class AbstractAsyncMetricsDispatchTask extends AbstractClusterPh
try {
TaskResult tr = this.processClusterTask(clusterPhy, triggerTimeUnitMs);
if (TaskResult.SUCCESS_CODE != tr.getCode()) {
log.error("class=AbstractAsyncMetricsDispatchTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||taskResult={}||msg=failed", this.taskName, clusterPhy.getId(), tr);
} else {
log.debug("class=AbstractAsyncMetricsDispatchTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
log.debug("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||msg=success", this.taskName, clusterPhy.getId());
}
} catch (Exception e) {
log.error("class=AbstractAsyncMetricsDispatchTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
log.error("method=asyncProcessSubTask||taskName={}||clusterPhyId={}||errMsg=exception", this.taskName, clusterPhy.getId(), e);
}
}
);

View File

@@ -1,11 +1,9 @@
-package com.xiaojukeji.know.streaming.km.task.metrics;
+package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
-import com.didiglobal.logi.log.ILog;
-import com.didiglobal.logi.log.LogFactory;
-import com.xiaojukeji.know.streaming.km.collector.metric.BrokerMetricCollector;
+import com.xiaojukeji.know.streaming.km.collector.metric.kafka.BrokerMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,13 +12,11 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
@Task(name = "BrokerMetricCollectorTask",
description = "Broker指标采集任务",
cron = "0 0/1 * * * ? *",
cron = "20 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class BrokerMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
-private static final ILog log = LogFactory.getLog(BrokerMetricCollectorTask.class);
@Autowired
private BrokerMetricCollector brokerMetricCollector;

View File

@@ -1,11 +1,9 @@
-package com.xiaojukeji.know.streaming.km.task.metrics;
+package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
-import com.didiglobal.logi.log.ILog;
-import com.didiglobal.logi.log.LogFactory;
-import com.xiaojukeji.know.streaming.km.collector.metric.ClusterMetricCollector;
+import com.xiaojukeji.know.streaming.km.collector.metric.kafka.ClusterMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,13 +12,11 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
@Task(name = "ClusterMetricCollectorTask",
description = "Cluster指标采集任务",
cron = "0 0/1 * * * ? *",
cron = "30 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ClusterMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
private static final ILog log = LogFactory.getLog(ClusterMetricCollectorTask.class);
@Autowired
private ClusterMetricCollector clusterMetricCollector;

View File

@@ -1,11 +1,11 @@
package com.xiaojukeji.know.streaming.km.task.metrics;
package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.GroupMetricCollector;
import com.xiaojukeji.know.streaming.km.collector.metric.kafka.GroupMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,7 +14,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
@Task(name = "GroupMetricCollectorTask",
description = "Group指标采集任务",
cron = "0 0/1 * * * ? *",
cron = "40 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)

View File

@@ -1,9 +1,9 @@
package com.xiaojukeji.know.streaming.km.task.metrics;
package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.xiaojukeji.know.streaming.km.collector.metric.PartitionMetricCollector;
import com.xiaojukeji.know.streaming.km.collector.metric.kafka.PartitionMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -1,11 +1,9 @@
package com.xiaojukeji.know.streaming.km.task.metrics;
package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.TopicMetricCollector;
import com.xiaojukeji.know.streaming.km.collector.metric.kafka.TopicMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,13 +12,11 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
@Task(name = "TopicMetricCollectorTask",
description = "Topic指标采集任务",
cron = "0 0/1 * * * ? *",
cron = "10 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class TopicMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
private static final ILog log = LogFactory.getLog(TopicMetricCollectorTask.class);
@Autowired
private TopicMetricCollector topicMetricCollector;

View File

@@ -1,11 +1,9 @@
package com.xiaojukeji.know.streaming.km.task.metrics;
package com.xiaojukeji.know.streaming.km.task.kafka.metrics;
import com.didiglobal.logi.job.annotation.Task;
import com.didiglobal.logi.job.common.TaskResult;
import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.ZookeeperMetricCollector;
import com.xiaojukeji.know.streaming.km.collector.metric.kafka.ZookeeperMetricCollector;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,13 +12,11 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
@Task(name = "ZookeeperMetricCollectorTask",
description = "Zookeeper指标采集任务",
cron = "0 0/1 * * * ? *",
cron = "50 0/1 * * * ? *",
autoRegister = true,
consensual = ConsensualEnum.BROADCAST,
timeout = 2 * 60)
public class ZookeeperMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
private static final ILog log = LogFactory.getLog(ZookeeperMetricCollectorTask.class);
@Autowired
private ZookeeperMetricCollector zookeeperMetricCollector;
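
Taken together, the cron edits in the five collector tasks above stagger the broadcast jobs across the minute instead of firing them all at second 0, presumably to spread the JMX and admin-client load on each cluster. Quartz cron fields run second, minute, hour, day-of-month, month, day-of-week, year, so the offsets set in this diff yield:

    // "10 0/1 * * * ? *"  -> TopicMetricCollectorTask      fires at second 10 of every minute
    // "20 0/1 * * * ? *"  -> BrokerMetricCollectorTask     fires at second 20
    // "30 0/1 * * * ? *"  -> ClusterMetricCollectorTask    fires at second 30
    // "40 0/1 * * * ? *"  -> GroupMetricCollectorTask      fires at second 40
    // "50 0/1 * * * ? *"  -> ZookeeperMetricCollectorTask  fires at second 50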

View File

@@ -1,32 +0,0 @@
//package com.xiaojukeji.know.streaming.km.task.metrics;
//
//import com.didiglobal.logi.job.annotation.Task;
//import com.didiglobal.logi.job.common.TaskResult;
//import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
//import com.xiaojukeji.know.streaming.km.collector.metric.ReplicaMetricCollector;
//import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
//import lombok.extern.slf4j.Slf4j;
//import org.springframework.beans.factory.annotation.Autowired;
//
///**
// * @author didi
// */
//@Slf4j
//@Task(name = "ReplicaMetricCollectorTask",
// description = "Replica指标采集任务",
// cron = "0 0/1 * * * ? *",
// autoRegister = true,
// consensual = ConsensualEnum.BROADCAST,
// timeout = 2 * 60)
//public class ReplicaMetricCollectorTask extends AbstractAsyncMetricsDispatchTask {
//
// @Autowired
// private ReplicaMetricCollector replicaMetricCollector;
//
// @Override
// public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
// replicaMetricCollector.collectMetrics(clusterPhy);
//
// return TaskResult.SUCCESS;
// }
//}

View File

@@ -52,21 +52,21 @@ public class TaskThreadPoolService {
@PostConstruct
private void init() {
metricsTaskThreadPool = FutureNoWaitUtil.init(
"metricsTaskThreadPool",
"MetricsTaskTP",
metricsTaskThreadNum,
metricsTaskThreadNum,
metricsTaskQueueSize
);
metadataTaskThreadPool = FutureNoWaitUtil.init(
"metadataTaskThreadPool",
"MetadataTaskTP",
metadataTaskThreadNum,
metadataTaskThreadNum,
metadataTaskQueueSize
);
commonTaskThreadPool = FutureNoWaitUtil.init(
"commonTaskThreadPool",
"CommonTaskTP",
commonTaskThreadNum,
commonTaskThreadNum,
commonTaskQueueSize
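
The edits above only shorten the pool names (metricsTaskThreadPool becomes MetricsTaskTP, and so on), which keeps worker-thread names legible in jstack dumps and log lines. FutureNoWaitUtil is project-internal and not shown here; a rough JDK equivalent of what its init(name, coreSize, maxSize, queueSize) call is assumed to build:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch only: assumes the util builds a fixed-size pool whose threads
    // carry the given name as a prefix, e.g. MetricsTaskTP-0, MetricsTaskTP-1, ...
    final class NamedPools {
        static ExecutorService newNamedPool(String name, int threads, int queueSize) {
            AtomicInteger idx = new AtomicInteger(0);
            return new ThreadPoolExecutor(
                    threads, threads,
                    0L, TimeUnit.MILLISECONDS,
                    new LinkedBlockingQueue<>(queueSize),
                    r -> new Thread(r, name + "-" + idx.getAndIncrement()));
        }
    }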

View File

@@ -8,8 +8,8 @@ import com.xiaojukeji.know.streaming.km.common.component.SpringTool;
import com.xiaojukeji.know.streaming.km.common.utils.BackoffUtils;
import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil;
import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
import com.xiaojukeji.know.streaming.km.task.metadata.AbstractAsyncMetadataDispatchTask;
import com.xiaojukeji.know.streaming.km.task.metrics.AbstractAsyncMetricsDispatchTask;
import com.xiaojukeji.know.streaming.km.task.kafka.metadata.AbstractAsyncMetadataDispatchTask;
import com.xiaojukeji.know.streaming.km.task.kafka.metrics.AbstractAsyncMetricsDispatchTask;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Service;
@@ -22,7 +22,7 @@ public class TaskClusterAddedListener implements ApplicationListener<ClusterPhyA
@Override
public void onApplicationEvent(ClusterPhyAddedEvent event) {
LOGGER.info("class=TaskClusterAddedListener||method=onApplicationEvent||clusterPhyId={}||msg=listened new cluster", event.getClusterPhyId());
LOGGER.info("method=onApplicationEvent||clusterPhyId={}||msg=listened new cluster", event.getClusterPhyId());
Long now = System.currentTimeMillis();
// Hand the new cluster's tasks to the KS custom thread pool for asynchronous execution
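
The listener half of this wiring is shown above; the publishing half is not part of this diff. A hedged sketch of how such an event is typically raised in Spring; the service name and the event constructor are assumptions, not taken from this commit:

    import org.springframework.context.ApplicationEventPublisher;
    import org.springframework.stereotype.Service;

    @Service
    public class ClusterPhyAddService { // hypothetical publish site

        private final ApplicationEventPublisher publisher;

        public ClusterPhyAddService(ApplicationEventPublisher publisher) {
            this.publisher = publisher;
        }

        public void addCluster(Long clusterPhyId) {
            // ... persist the new cluster, then notify listeners such as TaskClusterAddedListener
            publisher.publishEvent(new ClusterPhyAddedEvent(this, clusterPhyId)); // constructor assumed
        }
    }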